diff --git a/pom.xml b/pom.xml index 7227da3581..bf0f0ff103 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 2.1.0.DATAMONGO-1311-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 9baccaa905..3f401adef3 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 2.1.0.DATAMONGO-1311-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml index 47a5b7aba7..e6e340fcbe 100644 --- a/spring-data-mongodb-cross-store/pom.xml +++ b/spring-data-mongodb-cross-store/pom.xml @@ -6,7 +6,7 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 2.1.0.DATAMONGO-1311-SNAPSHOT ../pom.xml @@ -50,7 +50,7 @@ org.springframework.data spring-data-mongodb - 2.1.0.BUILD-SNAPSHOT + 2.1.0.DATAMONGO-1311-SNAPSHOT diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index e5c865ea08..5e74626582 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -13,7 +13,7 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 2.1.0.DATAMONGO-1311-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index b86dc2808c..348663bf1f 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 2.1.0.DATAMONGO-1311-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index f0ac8e53a2..dd152a9170 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -3279,9 +3279,9 @@ public FindIterable prepare(FindIterable cursor) { return cursor; } + Meta meta = query.getMeta(); if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !query.getMeta().hasValues() - && !query.getCollation().isPresent()) { + && !StringUtils.hasText(query.getHint()) && !meta.hasValues() && !query.getCollation().isPresent()) { return cursor; } @@ -3301,18 +3301,33 @@ public FindIterable prepare(FindIterable cursor) { cursorToUse = cursorToUse.sort(sort); } - Document meta = new Document(); if (StringUtils.hasText(query.getHint())) { - meta.put("$hint", query.getHint()); + cursorToUse = cursorToUse.hint(Document.parse(query.getHint())); } - if (query.getMeta().hasValues()) { + if (meta.hasValues()) { - for (Entry entry : query.getMeta().values()) { - meta.put(entry.getKey(), entry.getValue()); + if (StringUtils.hasText(meta.getComment())) { + cursorToUse = cursorToUse.comment(meta.getComment()); } - for (Meta.CursorOption option : query.getMeta().getFlags()) { + if (meta.getSnapshot()) { + cursorToUse = cursorToUse.snapshot(meta.getSnapshot()); + } + + if (meta.getMaxScan() != null) { + cursorToUse = cursorToUse.maxScan(meta.getMaxScan()); + } + + if (meta.getMaxTimeMsec() != null) { + cursorToUse = cursorToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.getCursorBatchSize() != null) { + cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize()); + } + + for (Meta.CursorOption option : meta.getFlags()) { switch (option) { @@ -3328,7 +3343,6 @@ public FindIterable prepare(FindIterable cursor) { } } - cursorToUse = cursorToUse.modifiers(meta); } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ 
-3459,149 +3473,6 @@ public MongoDbFactory getMongoDbFactory() { return mongoDbFactory; } - /** - * {@link BatchAggregationLoader} is a little helper that can process cursor results returned by an aggregation - * command execution. On presence of a {@literal nextBatch} indicated by presence of an {@code id} field in the - * {@code cursor} another {@code getMore} command gets executed reading the next batch of documents until all results - * are loaded. - * - * @author Christoph Strobl - * @since 1.10 - */ - static class BatchAggregationLoader { - - private static final String CURSOR_FIELD = "cursor"; - private static final String RESULT_FIELD = "result"; - private static final String BATCH_SIZE_FIELD = "batchSize"; - private static final String FIRST_BATCH = "firstBatch"; - private static final String NEXT_BATCH = "nextBatch"; - private static final String SERVER_USED = "serverUsed"; - private static final String OK = "ok"; - - private final MongoTemplate template; - private final ReadPreference readPreference; - private final int batchSize; - - BatchAggregationLoader(MongoTemplate template, ReadPreference readPreference, int batchSize) { - - this.template = template; - this.readPreference = readPreference; - this.batchSize = batchSize; - } - - /** - * Run aggregation command and fetch all results. - */ - Document aggregate(String collectionName, Aggregation aggregation, AggregationOperationContext context) { - - Document command = prepareAggregationCommand(collectionName, aggregation, context, batchSize); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command)); - } - - return mergeAggregationResults(aggregateBatched(command, collectionName, batchSize)); - } - - /** - * Pre process the aggregation command sent to the server by adding {@code cursor} options to match execution on - * different server versions. 
- */ - private static Document prepareAggregationCommand(String collectionName, Aggregation aggregation, - @Nullable AggregationOperationContext context, int batchSize) { - - AggregationOperationContext rootContext = context == null ? Aggregation.DEFAULT_CONTEXT : context; - Document command = aggregation.toDocument(collectionName, rootContext); - - if (!aggregation.getOptions().isExplain()) { - command.put(CURSOR_FIELD, new Document(BATCH_SIZE_FIELD, batchSize)); - } - - return command; - } - - private List aggregateBatched(Document command, String collectionName, int batchSize) { - - List results = new ArrayList<>(); - - Document commandResult = template.executeCommand(command, readPreference); - results.add(postProcessResult(commandResult)); - - while (hasNext(commandResult)) { - - Document getMore = new Document("getMore", getNextBatchId(commandResult)) // - .append("collection", collectionName) // - .append(BATCH_SIZE_FIELD, batchSize); - - commandResult = template.executeCommand(getMore, this.readPreference); - results.add(postProcessResult(commandResult)); - } - - return results; - } - - private static Document postProcessResult(Document commandResult) { - - if (!commandResult.containsKey(CURSOR_FIELD)) { - return commandResult; - } - - Document resultObject = new Document(SERVER_USED, commandResult.get(SERVER_USED)); - resultObject.put(OK, commandResult.get(OK)); - - Document cursor = (Document) commandResult.get(CURSOR_FIELD); - if (cursor.containsKey(FIRST_BATCH)) { - resultObject.put(RESULT_FIELD, cursor.get(FIRST_BATCH)); - } else { - resultObject.put(RESULT_FIELD, cursor.get(NEXT_BATCH)); - } - - return resultObject; - } - - private static Document mergeAggregationResults(List batchResults) { - - if (batchResults.size() == 1) { - return batchResults.iterator().next(); - } - - Document commandResult = new Document(); - List allResults = new ArrayList<>(); - - for (Document batchResult : batchResults) { - - Collection documents = (Collection) 
batchResult.get(RESULT_FIELD); - if (!CollectionUtils.isEmpty(documents)) { - allResults.addAll(documents); - } - } - - // take general info from first batch - commandResult.put(SERVER_USED, batchResults.iterator().next().get(SERVER_USED)); - commandResult.put(OK, batchResults.iterator().next().get(OK)); - - // and append the merged batchResults - commandResult.put(RESULT_FIELD, allResults); - - return commandResult; - } - - private static boolean hasNext(Document commandResult) { - - if (!commandResult.containsKey(CURSOR_FIELD)) { - return false; - } - - Object next = getNextBatchId(commandResult); - return next != null && ((Number) next).longValue() != 0L; - } - - @Nullable - private static Object getNextBatchId(Document commandResult) { - return ((Document) commandResult.get(CURSOR_FIELD)).get("id"); - } - } - /** * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the * server through the driver API. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 2a5ba2a8a2..17bb1e20c8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -98,6 +98,7 @@ import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.data.mongodb.core.query.Meta; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; @@ -115,7 +116,6 @@ import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; 
-import com.mongodb.BasicDBObject; import com.mongodb.ClientSessionOptions; import com.mongodb.CursorType; import com.mongodb.DBCollection; @@ -971,7 +971,6 @@ protected Flux aggregate(Aggregation aggregation, String collectionName, List pipeline = aggregationUtil.createPipeline(aggregation, rootContext); Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming!"); - Assert.isNull(options.getCursorBatchSize(), "Cannot use batchSize cursor option with streaming!"); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); @@ -987,6 +986,10 @@ private Flux aggregateAndMap(MongoCollection collection, List cursor = collection.aggregate(pipeline, Document.class) .allowDiskUse(options.isAllowDiskUse()); + if (options.getCursorBatchSize() != null) { + cursor = cursor.batchSize(options.getCursorBatchSize()); + } + if (options.getCollation().isPresent()) { cursor = cursor.collation(options.getCollation().map(Collation::toMongoCollation).get()); } @@ -3216,8 +3219,9 @@ public FindPublisher prepare(FindPublisher findPublisher) { findPublisherToUse = query.getCollation().map(Collation::toMongoCollation).map(findPublisher::collation) .orElse(findPublisher); + Meta meta = query.getMeta(); if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !query.getMeta().hasValues()) { + && !StringUtils.hasText(query.getHint()) && !meta.hasValues()) { return findPublisherToUse; } @@ -3232,21 +3236,34 @@ public FindPublisher prepare(FindPublisher findPublisher) { Document sort = type != null ? 
getMappedSortObject(query, type) : query.getSortObject(); findPublisherToUse = findPublisherToUse.sort(sort); } - BasicDBObject modifiers = new BasicDBObject(); if (StringUtils.hasText(query.getHint())) { - modifiers.append("$hint", query.getHint()); + findPublisherToUse = findPublisherToUse.hint(Document.parse(query.getHint())); } - if (query.getMeta().hasValues()) { - for (Entry entry : query.getMeta().values()) { - modifiers.append(entry.getKey(), entry.getValue()); + if (meta.hasValues()) { + + if (StringUtils.hasText(meta.getComment())) { + findPublisherToUse = findPublisherToUse.comment(meta.getComment()); + } + + if (meta.getSnapshot()) { + findPublisherToUse = findPublisherToUse.snapshot(meta.getSnapshot()); } - } - if (!modifiers.isEmpty()) { - findPublisherToUse = findPublisherToUse.modifiers(modifiers); + if (meta.getMaxScan() != null) { + findPublisherToUse = findPublisherToUse.maxScan(meta.getMaxScan()); + } + + if (meta.getMaxTimeMsec() != null) { + findPublisherToUse = findPublisherToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.getCursorBatchSize() != null) { + findPublisherToUse = findPublisherToUse.batchSize(meta.getCursorBatchSize()); + } } + } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java index f9343eee69..84f597cc54 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoExampleMapper.java @@ -164,13 +164,14 @@ private void applyPropertySpecs(String path, Document source, Class probeType if (exampleSpecAccessor.hasPropertySpecifier(mappedPropertyPath)) { PropertyValueTransformer valueTransformer = 
exampleSpecAccessor.getValueTransformerForPath(mappedPropertyPath); - value = valueTransformer.convert(value); - if (value == null) { + Optional converted = valueTransformer.apply(Optional.ofNullable(value)); + + if(!converted.isPresent()) { iter.remove(); continue; } - entry.setValue(value); + entry.setValue(converted.get()); } if (entry.getValue() instanceof String) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java index a9b4ba4869..99fba90824 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.query; +import java.time.Duration; import java.util.Collections; import java.util.LinkedHashMap; import java.util.LinkedHashSet; @@ -50,6 +51,7 @@ private enum MetaKey { private final Map values = new LinkedHashMap(2); private final Set flags = new LinkedHashSet(); + private Integer cursorBatchSize; /** * @return {@literal null} if not set. @@ -65,7 +67,7 @@ public Long getMaxTimeMsec() { * @param maxTimeMsec */ public void setMaxTimeMsec(long maxTimeMsec) { - setMaxTime(maxTimeMsec, TimeUnit.MILLISECONDS); + setMaxTime(Duration.ofMillis(maxTimeMsec)); } /** @@ -73,11 +75,25 @@ public void setMaxTimeMsec(long maxTimeMsec) { * * @param timeout * @param timeUnit + * @deprecated since 2.1. Use {@link #setMaxTime(Duration)} instead. */ + @Deprecated public void setMaxTime(long timeout, @Nullable TimeUnit timeUnit) { setValue(MetaKey.MAX_TIME_MS.key, (timeUnit != null ? timeUnit : TimeUnit.MILLISECONDS).toMillis(timeout)); } + /** + * Set the maximum time limit for processing operations. + * + * @param timeout must not be {@literal null}. 
+ * @since 2.1 + */ + public void setMaxTime(Duration timeout) { + + Assert.notNull(timeout, "Timeout must not be null!"); + setValue(MetaKey.MAX_TIME_MS.key, timeout.toMillis()); + } + /** * @return {@literal null} if not set. */ @@ -90,13 +106,15 @@ public Long getMaxScan() { * Only scan the specified number of documents. * * @param maxScan + * @deprecated since 2.1 due to deprecation in MongoDB 4.0. */ + @Deprecated public void setMaxScan(long maxScan) { setValue(MetaKey.MAX_SCAN.key, maxScan); } /** - * Add a comment to the query. + * Add a comment to the query that is propagated to the profile log. * * @param comment */ @@ -116,7 +134,9 @@ public String getComment() { * Using snapshot prevents the cursor from returning a document more than once. * * @param useSnapshot + * @deprecated since 2.1 due to deprecation as of MongoDB 3.6 */ + @Deprecated public void setSnapshot(boolean useSnapshot) { setValue(MetaKey.SNAPSHOT.key, useSnapshot); } @@ -128,6 +148,27 @@ public boolean getSnapshot() { return getValue(MetaKey.SNAPSHOT.key, false); } + /** + * @return {@literal null} if not set. + * @since 2.1 + */ + @Nullable + public Integer getCursorBatchSize() { + return cursorBatchSize; + } + + /** + * Apply the batch size (number of documents to return in each response) for a query.
+ * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. + * + * @param cursorBatchSize The number of documents to return per batch. + * @since 2.1 + */ + public void setCursorBatchSize(int cursorBatchSize) { + this.cursorBatchSize = cursorBatchSize; + } + /** * Add {@link CursorOption} influencing behavior of the {@link com.mongodb.DBCursor}. * @@ -153,7 +194,7 @@ public Set getFlags() { * @return */ public boolean hasValues() { - return !this.values.isEmpty() || !this.flags.isEmpty(); + return !this.values.isEmpty() || !this.flags.isEmpty() || this.cursorBatchSize != null; } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java index af8a616137..7dddee90d3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java @@ -18,6 +18,7 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; import static org.springframework.util.ObjectUtils.*; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; @@ -289,7 +290,7 @@ public String getHint() { /** * @param maxTimeMsec - * @return + * @return this. * @see Meta#setMaxTimeMsec(long) * @since 1.6 */ @@ -302,22 +303,38 @@ public Query maxTimeMsec(long maxTimeMsec) { /** * @param timeout * @param timeUnit - * @return + * @return this. * @see Meta#setMaxTime(long, TimeUnit) * @since 1.6 + * @deprecated since 2.1. Use {@link #maxTime(Duration)} instead. */ + @Deprecated public Query maxTime(long timeout, TimeUnit timeUnit) { meta.setMaxTime(timeout, timeUnit); return this; } + /** + * @param timeout + * @return this. 
+ * @see Meta#setMaxTime(Duration) + * @since 2.1 + */ + public Query maxTime(Duration timeout) { + + meta.setMaxTime(timeout); + return this; + } + /** * @param maxScan - * @return + * @return this. * @see Meta#setMaxScan(long) * @since 1.6 + * @deprecated since 2.1 due to deprecation in MongoDB 4.0. */ + @Deprecated public Query maxScan(long maxScan) { meta.setMaxScan(maxScan); @@ -325,8 +342,10 @@ public Query maxScan(long maxScan) { } /** + * Add a comment to the query that is propagated to the profile log. + * * @param comment - * @return + * @return this. * @see Meta#setComment(String) * @since 1.6 */ @@ -337,10 +356,12 @@ public Query comment(String comment) { } /** - * @return + * @return this. * @see Meta#setSnapshot(boolean) * @since 1.6 + * @deprecated since 2.1 due to deprecation as of MongoDB 3.6 */ + @Deprecated public Query useSnapshot() { meta.setSnapshot(true); @@ -348,7 +369,23 @@ public Query useSnapshot() { } /** - * @return + * Set the number of documents to return in each response batch.
+ * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. + * + * @param batchSize The number of documents to return per batch. + * @return this. + * @see Meta#setCursorBatchSize(int) + * @since 2.1 + */ + public Query cursorBatchSize(int batchSize) { + + meta.setCursorBatchSize(batchSize); + return this; + } + + /** + * @return this. * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#NO_TIMEOUT * @since 1.10 */ @@ -359,7 +396,7 @@ public Query noCursorTimeout() { } /** - * @return + * @return this. * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#EXHAUST * @since 1.10 */ @@ -370,7 +407,9 @@ public Query exhaust() { } /** - * @return + * Allows querying of a replica slave. + * + * @return this. * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#SLAVE_OK * @since 1.10 */ @@ -381,7 +420,7 @@ public Query slaveOk() { } /** - * @return + * @return this. * @see org.springframework.data.mongodb.core.query.Meta.CursorOption#PARTIAL * @since 1.10 */ @@ -392,7 +431,7 @@ public Query partialResults() { } /** - * @return never {@literal null}. 
+ * @return never {@literal null}. * @since 1.6 */ public Meta getMeta() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java index c350151b95..068c6d250e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/Meta.java @@ -21,7 +21,6 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; -import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.QueryAnnotation; /** @@ -50,6 +49,16 @@ */ long maxScanDocuments() default -1; + /** + * Sets the number of documents to return per batch.
+ * Use {@literal 0 (zero)} for no limit. A negative limit closes the cursor after returning a single + * batch indicating to the server that the client will not ask for a subsequent one. + * + * @return {@literal 0 (zero)} by default. + * @since 2.1 + */ + int cursorBatchSize() default 0; + /** * Add a comment to the query. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java index d7d42d9c32..f95dee9a38 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoParametersParameterAccessor.java @@ -19,6 +19,7 @@ import java.util.List; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Range.Bound; import org.springframework.data.geo.Distance; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.query.Term; @@ -66,9 +67,9 @@ public Range getDistanceRange() { } int maxDistanceIndex = mongoParameters.getMaxDistanceIndex(); - Distance maxDistance = maxDistanceIndex == -1 ? null : (Distance) getValue(maxDistanceIndex); + Bound maxDistance = maxDistanceIndex == -1 ? 
Bound.unbounded() : Bound.inclusive((Distance) getValue(maxDistanceIndex)); - return new Range(null, maxDistance); + return Range.of(Bound.unbounded(), maxDistance); } /* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java index eb61f3c858..31bc144f4b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/MongoQueryMethod.java @@ -160,8 +160,7 @@ public MongoEntityMetadata getEntityInformation() { MongoPersistentEntity collectionEntity = domainClass.isAssignableFrom(returnedObjectType) ? returnedEntity : managedEntity; - this.metadata = new SimpleMongoEntityMetadata<>((Class) returnedEntity.getType(), - collectionEntity); + this.metadata = new SimpleMongoEntityMetadata<>((Class) returnedEntity.getType(), collectionEntity); } } @@ -274,6 +273,10 @@ public org.springframework.data.mongodb.core.query.Meta getQueryMetaAttributes() metaAttributes.setMaxScan(meta.maxScanDocuments()); } + if (meta.cursorBatchSize() != 0) { + metaAttributes.setCursorBatchSize(meta.cursorBatchSize()); + } + if (StringUtils.hasText(meta.comment())) { metaAttributes.setComment(meta.comment()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 8bdaf46ffe..96c70cfb67 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -22,7 +22,6 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static 
org.springframework.data.mongodb.test.util.IsBsonObject.*; -import com.mongodb.client.model.ReplaceOptions; import lombok.Data; import java.math.BigInteger; @@ -93,6 +92,7 @@ import com.mongodb.client.model.DeleteOptions; import com.mongodb.client.model.FindOneAndDeleteOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReplaceOptions; import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.UpdateResult; @@ -138,7 +138,6 @@ public void setUp() { when(collection.withReadPreference(any())).thenReturn(collection); when(findIterable.projection(any())).thenReturn(findIterable); when(findIterable.sort(any(org.bson.Document.class))).thenReturn(findIterable); - when(findIterable.modifiers(any(org.bson.Document.class))).thenReturn(findIterable); when(findIterable.collation(any())).thenReturn(findIterable); when(findIterable.limit(anyInt())).thenReturn(findIterable); when(mapReduceIterable.collation(any())).thenReturn(mapReduceIterable); @@ -161,7 +160,7 @@ public void rejectsNullDatabaseName() throws Exception { new MongoTemplate(mongo, null); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1968 + @Test(expected = IllegalArgumentException.class) // DATAMONGO-1968 public void rejectsNullMongo() { new MongoTemplate((MongoClient) null, "database"); } @@ -653,6 +652,17 @@ public void onBeforeConvert(BeforeConvertEvent event) { assertThat(updateCaptor.getValue(), isBsonObject().containing("$set.jon", "snow").notContaining("$isolated")); } + @Test // DATAMONGO-1311 + public void executeQueryShouldUseBatchSizeWhenPresent() { + + when(findIterable.batchSize(anyInt())).thenReturn(findIterable); + + Query query = new Query().cursorBatchSize(1234); + template.find(query, Person.class); + + verify(findIterable).batchSize(1234); + } + @Test // DATAMONGO-1518 public void executeQueryShouldUseCollationWhenPresent() { @@ -722,7 +732,8 @@ public void findAndRemoveShouldUseCollationWhenPresent() { @Test 
// DATAMONGO-1518 public void findAndRemoveManyShouldUseCollationWhenPresent() { - template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class, true); + template.doRemove("collection-1", new BasicQuery("{}").collation(Collation.of("fr")), AutogenerateableId.class, + true); ArgumentCaptor options = ArgumentCaptor.forClass(DeleteOptions.class); verify(collection).deleteMany(any(), options.capture()); @@ -969,7 +980,7 @@ private MongoTemplate mockOutGetDb() { return template; } - /* + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.core.MongoOperationsUnitTests#getOperations() */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java index 9ab53dbed8..a7bed9adab 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/QueryCursorPreparerUnitTests.java @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.core; -import static org.hamcrest.core.IsEqual.*; -import static org.junit.Assert.*; import static org.mockito.Mockito.*; import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; @@ -27,13 +25,12 @@ import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; -import org.mockito.ArgumentCaptor; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.core.MongoTemplate.QueryCursorPreparer; -import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import 
org.springframework.data.mongodb.core.query.Meta; import org.springframework.data.mongodb.core.query.Query; @@ -53,13 +50,16 @@ public class QueryCursorPreparerUnitTests { @Mock MongoExceptionTranslator exceptionTranslatorMock; @Mock FindIterable cursor; - @Mock FindIterable cursorToUse; - @Before public void setUp() { when(factory.getExceptionTranslator()).thenReturn(exceptionTranslatorMock); - when(cursor.modifiers(any(Document.class))).thenReturn(cursor); + when(cursor.batchSize(anyInt())).thenReturn(cursor); + when(cursor.comment(anyString())).thenReturn(cursor); + when(cursor.maxTime(anyLong(), any())).thenReturn(cursor); + when(cursor.maxScan(anyLong())).thenReturn(cursor); + when(cursor.hint(any())).thenReturn(cursor); + when(cursor.snapshot(anyBoolean())).thenReturn(cursor); when(cursor.noCursorTimeout(anyBoolean())).thenReturn(cursor); when(cursor.collation(any())).thenReturn(cursor); } @@ -67,13 +67,10 @@ public void setUp() { @Test // DATAMONGO-185 public void appliesHintsCorrectly() { - Query query = query(where("foo").is("bar")).withHint("hint"); - + Query query = query(where("foo").is("bar")).withHint("{ age: 1 }"); prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$hint", "hint"))); + verify(cursor).hint(new Document("age", 1)); } @Test // DATAMONGO-957 @@ -84,55 +81,43 @@ public void doesNotApplyMetaWhenEmpty() { prepare(query); - verify(cursorToUse, never()).modifiers(any(Document.class)); + verify(cursor, never()).modifiers(any(Document.class)); } @Test // DATAMONGO-957 public void appliesMaxScanCorrectly() { Query query = query(where("foo").is("bar")).maxScan(100); - prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$maxScan", 100L))); + verify(cursor).maxScan(100); } @Test // 
DATAMONGO-957 public void appliesMaxTimeCorrectly() { Query query = query(where("foo").is("bar")).maxTime(1, TimeUnit.SECONDS); - prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$maxTimeMS", 1000L))); + verify(cursor).maxTime(1000, TimeUnit.MILLISECONDS); } @Test // DATAMONGO-957 public void appliesCommentCorrectly() { Query query = query(where("foo").is("bar")).comment("spring data"); - prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$comment", "spring data"))); + verify(cursor).comment("spring data"); } @Test // DATAMONGO-957 public void appliesSnapshotCorrectly() { Query query = query(where("foo").is("bar")).useSnapshot(); - prepare(query); - ArgumentCaptor captor = ArgumentCaptor.forClass(Document.class); - verify(cursor).modifiers(captor.capture()); - assertThat(captor.getValue(), equalTo(new Document("$snapshot", true))); + verify(cursor).snapshot(true); } @Test // DATAMONGO-1480 @@ -153,6 +138,14 @@ public void appliesCollationCorrectly() { verify(cursor).collation(eq(com.mongodb.client.model.Collation.builder().locale("fr").build())); } + @Test // DATAMONGO-1311 + public void appliesBatchSizeCorrectly() { + + prepare(new BasicQuery("{}").cursorBatchSize(100)); + + verify(cursor).batchSize(100); + } + private FindIterable prepare(Query query) { CursorPreparer preparer = new MongoTemplate(factory).new QueryCursorPreparer(query, null); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index fd44ad5635..2c7c528548 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -19,9 +19,7 @@ import static org.junit.Assert.*; import static org.mockito.Mockito.*; import static org.mockito.Mockito.any; -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; -import com.mongodb.client.model.ReplaceOptions; import lombok.Data; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; @@ -44,20 +42,22 @@ import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.MongoTemplateUnitTests.AutogenerateableId; import org.springframework.data.mongodb.core.ReactiveMongoTemplate.NoOpDbRefResolver; -import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.NearQuery; +import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import org.springframework.test.util.ReflectionTestUtils; import com.mongodb.client.model.DeleteOptions; import com.mongodb.client.model.FindOneAndDeleteOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReplaceOptions; import com.mongodb.client.model.UpdateOptions; +import com.mongodb.reactivestreams.client.AggregatePublisher; import com.mongodb.reactivestreams.client.FindPublisher; import com.mongodb.reactivestreams.client.MongoClient; import com.mongodb.reactivestreams.client.MongoCollection; @@ -79,6 +79,7 @@ public class 
ReactiveMongoTemplateUnitTests { @Mock MongoDatabase db; @Mock MongoCollection collection; @Mock FindPublisher findPublisher; + @Mock AggregatePublisher aggregatePublisher; @Mock Publisher runCommandPublisher; MongoExceptionTranslator exceptionTranslator = new MongoExceptionTranslator(); @@ -95,10 +96,15 @@ public void setUp() { when(db.runCommand(any(), any(Class.class))).thenReturn(runCommandPublisher); when(collection.find(any(Class.class))).thenReturn(findPublisher); when(collection.find(any(Document.class), any(Class.class))).thenReturn(findPublisher); + when(collection.aggregate(anyList())).thenReturn(aggregatePublisher); + when(collection.aggregate(anyList(), any(Class.class))).thenReturn(aggregatePublisher); when(findPublisher.projection(any())).thenReturn(findPublisher); when(findPublisher.limit(anyInt())).thenReturn(findPublisher); when(findPublisher.collation(any())).thenReturn(findPublisher); when(findPublisher.first()).thenReturn(findPublisher); + when(aggregatePublisher.allowDiskUse(anyBoolean())).thenReturn(aggregatePublisher); + when(aggregatePublisher.collation(any())).thenReturn(aggregatePublisher); + when(aggregatePublisher.first()).thenReturn(findPublisher); this.mappingContext = new MongoMappingContext(); this.converter = new MappingMongoConverter(new NoOpDbRefResolver(), mappingContext); @@ -136,6 +142,17 @@ public void autogeneratesIdForMap() { }).verifyComplete(); } + @Test // DATAMONGO-1311 + public void executeQueryShouldUseBatchSizeWhenPresent() { + + when(findPublisher.batchSize(anyInt())).thenReturn(findPublisher); + + Query query = new Query().cursorBatchSize(1234); + template.find(query, Person.class).subscribe(); + + verify(findPublisher).batchSize(1234); + } + @Test // DATAMONGO-1518 public void findShouldUseCollationWhenPresent() { @@ -245,20 +262,6 @@ public void replaceOneShouldUseCollationWhenPresent() { assertThat(options.getValue().getCollation().getLocale(), is("fr")); } - @Ignore("currently no aggregation") - @Test // 
DATAMONGO-1518 - public void aggregateShouldUseCollationWhenPresent() { - - Aggregation aggregation = newAggregation(project("id")) - .withOptions(newAggregationOptions().collation(Collation.of("fr")).build()); - // template.aggregate(aggregation, AutogenerateableId.class, Document.class).subscribe(); - - ArgumentCaptor cmd = ArgumentCaptor.forClass(Document.class); - verify(db).runCommand(cmd.capture(), any(Class.class)); - - assertThat(cmd.getValue().get("collation", Document.class), equalTo(new Document("locale", "fr"))); - } - @Ignore("currently no mapReduce") @Test // DATAMONGO-1518 public void mapReduceShouldUseCollationWhenPresent() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java index d751f1320d..9b83ef1be1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReactiveAggregationUnitTests.java @@ -18,6 +18,8 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.*; +import static org.mockito.Mockito.anyInt; +import static org.mockito.Mockito.anyList; import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import org.bson.Document; @@ -80,16 +82,6 @@ public void shouldHandleMissingEntityClass() { template.aggregate(newAggregation(), INPUT_COLLECTION, null); } - @Test(expected = IllegalArgumentException.class) // DATAMONGO-1646 - public void errorsOnCursorBatchSizeUsage() { - - template.aggregate( - newAggregation(Product.class, // - project("name", "netPrice")) // - .withOptions(AggregationOptions.builder().cursorBatchSize(10).build()), - INPUT_COLLECTION, TagCount.class).subscribe(); - } - 
@Test(expected = IllegalArgumentException.class) // DATAMONGO-1646 public void errorsOnExplainUsage() { @@ -101,6 +93,20 @@ public void errorsOnExplainUsage() { .subscribe(); } + @Test // DATAMONGO-1646, DATAMONGO-1311 + public void appliesBatchSizeWhenPresent() { + + when(publisher.batchSize(anyInt())).thenReturn(publisher); + + AggregationOptions options = AggregationOptions.builder().cursorBatchSize(1234).build(); + template.aggregate(newAggregation(Product.class, // + project("name", "netPrice")) // + .withOptions(options), + INPUT_COLLECTION, TagCount.class).subscribe(); + + verify(publisher).batchSize(1234); + } + @Test // DATAMONGO-1646 public void appliesCollationCorrectlyWhenPresent() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java index 4ea7793dfe..522ab32e12 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryMethodUnitTests.java @@ -146,6 +146,24 @@ public void createsMongoQueryMethodWithMaxExecutionTimeCorrectly() throws Except assertThat(method.getQueryMetaAttributes().getMaxTimeMsec(), is(100L)); } + @Test // DATAMONGO-1311 + public void createsMongoQueryMethodWithBatchSizeCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "batchSize"); + + assertThat(method.hasQueryMetaAttributes(), is(true)); + assertThat(method.getQueryMetaAttributes().getCursorBatchSize(), is(100)); + } + + @Test // DATAMONGO-1311 + public void createsMongoQueryMethodWithNegativeBatchSizeCorrectly() throws Exception { + + MongoQueryMethod method = queryMethod(PersonRepository.class, "negativeBatchSize"); + + assertThat(method.hasQueryMetaAttributes(), is(true)); + 
assertThat(method.getQueryMetaAttributes().getCursorBatchSize(), is(-200)); + } + @Test // DATAMONGO-1403 public void createsMongoQueryMethodWithSpellFixedMaxExecutionTimeCorrectly() throws Exception { @@ -233,6 +251,12 @@ interface PersonRepository extends Repository { @Meta List emptyMetaAnnotation(); + @Meta(cursorBatchSize = 100) + List batchSize(); + + @Meta(cursorBatchSize = -200) + List negativeBatchSize(); + @Meta(maxExecutionTimeMs = 100) List metaWithMaxExecutionTime(); diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index 72816866fd..e400777415 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1761,6 +1761,35 @@ GeoResults results = mongoOps.query(SWCharacter.class) ---- ==== +[[mongo.query.additional-query-options]] +=== Additional Query Options + +MongoDB offers various ways of applying meta information, like a comment or a batch size, to a query. Using the `Query` API +directly, there are several methods for those options. + +==== +[source,java] +---- +Query query = query(where("firstname").is("luke")) + .comment("find luke") <1> + .cursorBatchSize(100) <2> + .slaveOk(); <3> +---- +<1> The comment propagated to the MongoDB profile log. +<2> The number of documents to return in each response batch. +<3> Allows querying a replica slave. +==== + +On the repository level the `@Meta` annotation provides a means to add query options in a declarative way. + +==== +[source,java] +---- +@Meta(comment = "find luke", cursorBatchSize = 100, flags = { SLAVE_OK }) +List findByFirstname(String firstname); +---- +==== + include::../{spring-data-commons-docs}/query-by-example.adoc[leveloffset=+1] include::query-by-example.adoc[leveloffset=+1]