From 5cffb3c07c3fa015479d032dbe7d6aec80a31808 Mon Sep 17 00:00:00 2001 From: David Julia Date: Sun, 27 Jun 2021 23:08:43 +1000 Subject: [PATCH 001/885] Fix Regression in generating queries with nested maps with numeric keys. While maps that have numeric keys work if there is only one map with an integer key, when there are multiple maps with numeric keys in a given query, it fails. Take the following example for a map called outer with numeric keys holding reference to another object with a map called inner with numeric keys: Updates that are meant to generate {"$set": {"outerMap.1234.inner.5678": "hello"}} are instead generating {"$set": {"outerMap.1234.inner.inner": "hello"}}, repeating the later map property name instead of using the integer key value. This commit adds unit tests both for the UpdateMapper and QueryMapper, which check multiple consecutive maps with numeric keys, and adds a fix in the KeyMapper. Because we cannot easily change the path parsing to somehow parse path parts corresponding to map keys differently, we address the issue in the KeyMapper. We keep track of the partial path corresponding to the current property and use it to skip adding the duplicated property name for the map to the query, and instead add the key. This is a bit redundant in that we now have both an iterator and an index-based way of accessing the path parts, but it gets the tests passing and fixes the issue without making a large change to the current approach. Fixes: #3688 Original Pull Request: #3689 --- .../mongodb/core/convert/QueryMapper.java | 22 ++++++++++++--- .../core/convert/QueryMapperUnitTests.java | 27 +++++++++++++++++++ .../core/convert/UpdateMapperUnitTests.java | 15 +++++++++++ 3 files changed, 61 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 08f6458e95..e1682fa6e9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -69,6 +69,7 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author David Julia */ public class QueryMapper { @@ -1367,11 +1368,17 @@ public TypeInformation getTypeHint() { static class KeyMapper { private final Iterator iterator; + private int currentIndex; + private String currentPropertyRoot; + private final List pathParts; public KeyMapper(String key, MappingContext, MongoPersistentProperty> mappingContext) { - this.iterator = Arrays.asList(key.split("\\.")).iterator(); + this.pathParts = Arrays.asList(key.split("\\.")); + this.currentPropertyRoot = pathParts.get(0); + this.currentIndex = 0; + this.iterator = pathParts.iterator(); this.iterator.next(); } @@ -1389,16 +1396,25 @@ protected String mapPropertyName(MongoPersistentProperty property) { while (inspect) { String partial = iterator.next(); + currentIndex++; - boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike(); + boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike() ; + if(property.isMap() && currentPropertyRoot.equals(partial) && iterator.hasNext()){ + partial = iterator.next(); + currentIndex++; + } - if (isPositional || property.isMap()) { + if (isPositional || property.isMap() && !currentPropertyRoot.equals(partial)) { 
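+					// after skipping a repeated property name above, "partial" is either a positional
+					// index or the actual map key, so it is appended to the mapped name verbatim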
mappedName.append(".").append(partial); } inspect = isPositional && iterator.hasNext(); } + if(currentIndex + 1 < pathParts.size()) { + currentIndex++; + currentPropertyRoot = pathParts.get(currentIndex); + } return mappedName.toString(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index ba883d14c4..efd354b866 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -72,6 +72,7 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author David Julia */ public class QueryMapperUnitTests { @@ -730,6 +731,28 @@ void mappingShouldRetainNumericMapKey() { assertThat(document).containsKey("map.1.stringProperty"); } + @Test // GH-3688 + void mappingShouldRetainNestedNumericMapKeys() { + + Query query = query(where("outerMap.1.map.2.stringProperty").is("ba'alzamon")); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map.2.stringProperty"); + } + + @Test // GH-3688 + void mappingShouldAllowSettingEntireNestedNumericKeyedMapValue() { + + Query query = query(where("outerMap.1.map").is(null)); //newEntityWithComplexValueTypeMap() + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map"); + } + @Test // DATAMONGO-1269 void mappingShouldRetainNumericPositionInList() { @@ -1467,6 +1490,10 @@ static class EntityWithComplexValueTypeMap { Map map; } + static class EntityWithIntKeyedMapOfMap{ + Map outerMap; + } + static class EntityWithComplexValueTypeList { List list; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java index a8d5f12b9f..bba9811e56 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java @@ -67,6 +67,7 @@ * @author Thomas Darimont * @author Mark Paluch * @author Pavel Vodrazka + * @author David Julia */ @ExtendWith(MockitoExtension.class) class UpdateMapperUnitTests { @@ -1179,6 +1180,16 @@ void numericKeyInMapOfNestedPath() { assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396.value\": \"testing\"}}"); } + @Test // GH-3688 + void multipleNumericKeysInNestedPath() { + + Update update = new Update().set("intKeyedMap.12345.map.0", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.12345.map.0\": \"testing\"}}"); + } + @Test // GH-3566 void mapsObjectClassPropertyFieldInMapValueTypeAsKey() { @@ -1425,6 +1436,10 @@ static class EntityWithObjectMap { Map concreteMap; } + static class EntityWithIntKeyedMap{ + Map intKeyedMap; + } + static class ClassWithEnum { Allocation allocation; From 
ef29e69a87022db0ca0e475dc4b276dccab0597d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 5 Jul 2021 10:28:39 +0200 Subject: [PATCH 002/885] Polishing. Simplify KeyMapper current property/index setup. Original Pull Request: #3689 --- .../data/mongodb/core/convert/QueryMapper.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index e1682fa6e9..7a14f07c4c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -1376,10 +1376,9 @@ public KeyMapper(String key, MappingContext, MongoPersistentProperty> mappingContext) { this.pathParts = Arrays.asList(key.split("\\.")); - this.currentPropertyRoot = pathParts.get(0); - this.currentIndex = 0; this.iterator = pathParts.iterator(); - this.iterator.next(); + this.currentPropertyRoot = iterator.next(); + this.currentIndex = 0; } /** @@ -1391,6 +1390,7 @@ public KeyMapper(String key, protected String mapPropertyName(MongoPersistentProperty property) { StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property)); + boolean inspect = iterator.hasNext(); while (inspect) { From 4f65bb0810cceeaf1fbf783aecd13c3ebff20aab Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 22 Jun 2021 14:03:52 +0200 Subject: [PATCH 003/885] Fix mapping context setup to include simple type holder. Original pull request: #3687. Resolves #3656 --- .../data/mongodb/core/MongoTemplateUnitTests.java | 1 + .../data/mongodb/core/ReactiveMongoTemplateUnitTests.java | 2 ++ .../core/convert/DbRefMappingMongoConverterUnitTests.java | 1 + .../data/mongodb/core/convert/MappingMongoConverterTests.java | 2 ++ .../mongodb/core/convert/MappingMongoConverterUnitTests.java | 1 + .../data/mongodb/core/query/BasicQueryUnitTests.java | 4 ++++ .../mongodb/test/util/MongoTestTemplateConfiguration.java | 2 ++ 7 files changed, 13 insertions(+) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index c3ee9b32ff..68c83a2757 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -206,6 +206,7 @@ void beforeEach() { this.mappingContext = new MongoMappingContext(); mappingContext.setAutoIndexCreation(true); + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); mappingContext.afterPropertiesSet(); this.converter = spy(new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext)); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index 931ea75cea..5c5a307f1d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -23,6 +23,7 
@@ import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; @@ -189,6 +190,7 @@ void beforeEach() { when(aggregatePublisher.first()).thenReturn(findPublisher); this.mappingContext = new MongoMappingContext(); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); this.template = new ReactiveMongoTemplate(factory, converter); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index d5285e7d2e..b677d90df5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -85,6 +85,7 @@ void setUp() { this.dbRefResolver = spy(new DefaultDbRefResolver(dbFactory)); this.mappingContext = new MongoMappingContext(); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java index 2b17ed4b06..c9a4937125 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java @@ -31,6 +31,7 @@ import java.time.ZoneId; import java.time.temporal.ChronoUnit; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -80,6 +81,7 @@ void setUp() { dbRefResolver = spy(new DefaultDbRefResolver(factory)); mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); mappingContext.setInitialEntitySet(new HashSet<>( Arrays.asList(WithLazyDBRefAsConstructorArg.class, WithLazyDBRef.class, WithJavaTimeTypes.class))); mappingContext.setAutoIndexCreation(false); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index c8e2fec155..369f6dbdef 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -1702,6 +1702,7 @@ void convertsJava8DateTimeTypesToDateAndBack() { } @Test // DATAMONGO-1128 + @Disabled("really we should find a solution for this") void writesOptionalsCorrectly() { TypeWithOptional type = new TypeWithOptional(); diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java index 1a7477f099..d9ef1cce30 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java @@ -24,6 +24,9 @@ import org.bson.Document; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.DisabledOnJre; +import org.junit.jupiter.api.condition.JRE; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; @@ -64,6 +67,7 @@ public void overridesSortCorrectly() { } @Test // DATAMONGO-1093 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "EqualsVerifier uses reflection on Optional") public void equalsContract() { BasicQuery query1 = new BasicQuery("{ \"name\" : \"Thomas\"}", "{\"name\":1, \"age\":1}"); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java index 2d2dedc2ee..ee75da8b19 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java @@ -121,6 +121,8 @@ MongoMappingContext mappingContext() { mappingContext.setAutoIndexCreation(mappingContextConfigurer.autocreateIndex); if(mongoConverterConfigurer.customConversions != null) { mappingContext.setSimpleTypeHolder(mongoConverterConfigurer.customConversions.getSimpleTypeHolder()); + } else { + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); } mappingContext.afterPropertiesSet(); } From 403f0019d55dd5b70d7132549f82acb8ae16d6bb Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 23 Jun 2021 15:45:14 +0200 Subject: [PATCH 004/885] Fix Optional handling in query creation and result processing. Original pull request: #3687. 
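For context, this change lets closed interface projections whose getters wrap values in `java.util.Optional` be used as query method results. A minimal illustrative sketch (not part of the patch itself; `Person` and `Address` stand in for the existing test domain types):

```java
import java.util.Optional;

import org.springframework.data.repository.CrudRepository;

// Closed interface projection: the Optional wrappers are unwrapped by Spring Data
// instead of being treated as persistent entities of their own.
interface PersonSummaryWithOptional {

	Optional<Address> getAddress();

	Optional<String> getFirstname();
}

// Repository query method returning the projection.
interface PersonRepository extends CrudRepository<Person, String> {

	PersonSummaryWithOptional findSummaryWithOptionalByLastname(String lastname);
}
```

The mapping context change below makes sure no persistent entity is created for nullable wrapper types such as `Optional` itself.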
Resolves #3656 --- .../core/mapping/MongoMappingContext.java | 6 ++++ .../mapping/MongoMappingContextUnitTests.java | 31 +++++++++++++++++++ ...tractPersonRepositoryIntegrationTests.java | 13 ++++++++ .../mongodb/repository/PersonRepository.java | 2 ++ .../repository/PersonSummaryWithOptional.java | 24 ++++++++++++++ 5 files changed, 76 insertions(+) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java index c204434809..38381fb994 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java @@ -26,6 +26,7 @@ import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.util.NullableWrapperConverters; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -69,6 +70,11 @@ public void setFieldNamingStrategy(@Nullable FieldNamingStrategy fieldNamingStra */ @Override protected boolean shouldCreatePersistentEntityFor(TypeInformation type) { + + if(NullableWrapperConverters.supports(type.getType())) { + return false; + } + return !MongoSimpleTypes.HOLDER.isSimpleType(type.getType()) && !AbstractMap.class.isAssignableFrom(type.getType()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java index a7e454c52a..f0cc1a2f32 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java @@ -22,6 +22,7 @@ import java.util.Collections; import java.util.Locale; import java.util.Map; +import java.util.Optional; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -35,6 +36,7 @@ import org.springframework.data.mapping.model.FieldNamingStrategy; import com.mongodb.DBRef; +import org.springframework.data.util.TypeInformation; /** * Unit tests for {@link MongoMappingContext}. 
@@ -173,6 +175,26 @@ void shouldNotCreateEntityForEnum() { assertThat(context.getPersistentEntity(ChronoUnit.class)).isNull(); } + @Test // GH-3656 + void shouldNotCreateEntityForOptionalGetter() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(InterfaceWithMethodReturningOptional.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); + } + + @Test // GH-3656 + void shouldNotCreateEntityForOptionalField() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(ClassWithOptionalField.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); + } + public class SampleClass { Map children; @@ -244,4 +266,13 @@ class ClassWithChronoUnit { ChronoUnit unit; } + + interface InterfaceWithMethodReturningOptional { + + Optional getPerson(); + } + + class ClassWithOptionalField { + Optional person; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java index d576913850..e462458ae8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java @@ -1460,4 +1460,17 @@ void executesQueryWithDocumentReferenceCorrectly() { List result = repository.findBySpiritAnimal(dave); assertThat(result).map(Person::getId).containsExactly(josh.getId()); } + + @Test //GH-3656 + void resultProjectionWithOptionalIsExcecutedCorrectly() { + + carter.setAddress(new Address("batman", "robin", "gotham")); + repository.save(carter); + + PersonSummaryWithOptional result = repository.findSummaryWithOptionalByLastname("Beauford"); + + assertThat(result).isNotNull(); + assertThat(result.getAddress()).isPresent(); + assertThat(result.getFirstname()).contains("Carter"); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java index 155cf7a7b9..9ac1282088 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java @@ -307,6 +307,8 @@ Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, Li // DATAMONGO-1030 PersonSummaryDto findSummaryByLastname(String lastname); + PersonSummaryWithOptional findSummaryWithOptionalByLastname(String lastname); + @Query("{ ?0 : ?1 }") List findByKeyValue(String key, String value); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java new file mode 100644 index 0000000000..d6a98752bb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java @@ -0,0 +1,24 @@ +/* + * Copyright 
2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.util.Optional; + +public interface PersonSummaryWithOptional { + + Optional
getAddress(); + Optional getFirstname(); +} From 81bc3c599b1dc0bcc03bf409c92eb7a6fb814e7d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 25 Jun 2021 13:40:55 +0200 Subject: [PATCH 005/885] Disable tests on Java 16 that require class-based proxies. Original pull request: #3687. Resolves #3656 --- .../core/mapping/MongoMappingContext.java | 4 ++-- .../DbRefMappingMongoConverterUnitTests.java | 6 ++++++ ...RepositoryLazyLoadingIntegrationTests.java | 20 +++++++++---------- .../reference/document-references.adoc | 6 ++++++ 4 files changed, 24 insertions(+), 12 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java index 38381fb994..121658b065 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java @@ -71,7 +71,7 @@ public void setFieldNamingStrategy(@Nullable FieldNamingStrategy fieldNamingStra @Override protected boolean shouldCreatePersistentEntityFor(TypeInformation type) { - if(NullableWrapperConverters.supports(type.getType())) { + if (NullableWrapperConverters.supports(type.getType())) { return false; } @@ -139,7 +139,7 @@ public MongoPersistentEntity getPersistentEntity(MongoPersistentProperty pers MongoPersistentEntity entity = super.getPersistentEntity(persistentProperty); - if(entity == null || !persistentProperty.isUnwrapped()) { + if (entity == null || !persistentProperty.isUnwrapped()) { return entity; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index b677d90df5..56ab37a0f6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -36,6 +36,8 @@ import org.bson.types.ObjectId; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.Mockito; @@ -181,6 +183,7 @@ void lazyLoadingProxyForLazyDbRefOnInterface() { } @Test // DATAMONGO-348 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. ArrayList require to open java.util.") void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { String id = "42"; @@ -508,6 +511,7 @@ void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { } @Test // DATAMONGO-1076 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. ArrayList require to open java.util.") void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { MongoPersistentEntity entity = mappingContext @@ -526,6 +530,7 @@ void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() thr } @Test // DATAMONGO-1194 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. 
ArrayList require to open java.util.") void shouldBulkFetchListOfReferences() { String id1 = "1"; @@ -576,6 +581,7 @@ void shouldBulkFetchSetOfReferencesForConstructorCreation() { } @Test // DATAMONGO-1194 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. ArrayList require to open java.util.") void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointToDifferentCollections() { String id1 = "1"; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java index 5cc8e82599..4ca82abf57 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java @@ -22,14 +22,15 @@ import java.util.Arrays; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; /** * Integration test for {@link PersonRepository} for lazy loading support. @@ -38,13 +39,13 @@ * @author Oliver Gierke */ @ContextConfiguration(locations = "PersonRepositoryIntegrationTests-context.xml") -@RunWith(SpringRunner.class) +@ExtendWith(SpringExtension.class) public class PersonRepositoryLazyLoadingIntegrationTests { @Autowired PersonRepository repository; @Autowired MongoOperations operations; - @Before + @BeforeEach public void setUp() throws InterruptedException { repository.deleteAll(); @@ -61,7 +62,6 @@ public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() thr Person person = new Person(); person.setFirstname("Oliver"); person.setFans(Arrays.asList(thomas)); - person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); Person oliver = repository.findById(person.id).get(); @@ -75,7 +75,8 @@ public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() thr } @Test // DATAMONGO-348 - public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() throws Exception { + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. 
ArrayList require to open java.util.") + public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() { User thomas = new User(); thomas.username = "Thomas"; @@ -83,7 +84,6 @@ public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnab Person person = new Person(); person.setFirstname("Oliver"); - person.setFans(Arrays.asList(thomas)); person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); diff --git a/src/main/asciidoc/reference/document-references.adoc b/src/main/asciidoc/reference/document-references.adoc index 92badd2fa1..885d2d6ade 100644 --- a/src/main/asciidoc/reference/document-references.adoc +++ b/src/main/asciidoc/reference/document-references.adoc @@ -49,6 +49,9 @@ TIP: Lazily loaded ``DBRef``s can be hard to debug. Make sure tooling does not accidentally trigger proxy resolution by e.g. calling `toString()` or some inline debug rendering invoking property getters. Please consider to enable _trace_ logging for `org.springframework.data.mongodb.core.convert.DefaultDbRefResolver` to gain insight on `DBRef` resolution. +CAUTION: Lazy loading may require class proxies, that in turn, might need access to jdk internals, that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default]. +For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument. + [[mapping-usage.document-references]] === Using Document References @@ -136,6 +139,9 @@ Result order of `Collection` like properties is restored based on the used looku | Resolves properties eagerly by default. |=== +CAUTION: Lazy loading may require class proxies, that in turn, might need access to jdk internals, that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default]. +For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument. + `@DocumentReference(lookup)` allows defining filter queries that can be different from the `_id` field and therefore offer a flexible way of defining references between entities as demonstrated in the sample below, where the `Publisher` of a book is referenced by its acronym instead of the internal `id`. ==== From 7d0b070d1f8110b9ff5d976e55e49c30794e95f9 Mon Sep 17 00:00:00 2001 From: Oliver Drotbohm Date: Fri, 9 Jul 2021 15:26:44 +0200 Subject: [PATCH 006/885] Adapt to API consolidation in Spring Data Commons' PersistentProperty. 
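`UnwrappedMongoPersistentProperty` decorates another `MongoPersistentProperty`, so every method added to the Spring Data Commons `PersistentProperty` contract needs a matching delegating override. A sketch of that pattern, with signatures approximated from the Commons API (the concrete methods are the ones added in the diff below):

```java
@Override
public Iterable<? extends TypeInformation<?>> getPersistentEntityTypeInformation() {
	return delegate.getPersistentEntityTypeInformation();
}

@Override
public TypeInformation<?> getAssociationTargetTypeInformation() {
	return delegate.getAssociationTargetTypeInformation();
}
```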
Closes: #3700 Original Pull Request: #3701 Related to: spring-projects/spring-data-commons#2408 --- .../core/mapping/UnwrappedMongoPersistentProperty.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index 8f24bab61b..a2194c173f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -147,6 +147,11 @@ public Iterable> getPersistentEntityTypes() { return delegate.getPersistentEntityTypes(); } + @Override + public Iterable> getPersistentEntityTypeInformation() { + return delegate.getPersistentEntityTypeInformation(); + } + @Override @Nullable public Method getGetter() { @@ -318,6 +323,11 @@ public Class getAssociationTargetType() { return delegate.getAssociationTargetType(); } + @Override + public TypeInformation getAssociationTargetTypeInformation() { + return delegate.getAssociationTargetTypeInformation(); + } + @Override public PersistentPropertyAccessor getAccessorForOwner(T owner) { return delegate.getAccessorForOwner(owner); From a6a2f0bde902e31ff41070e24a20c1af03b60edb Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 12 Jul 2021 14:56:44 +0200 Subject: [PATCH 007/885] Upgrade to MongoDB 4.3.0-beta4 Drivers. Closes: #3693 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index a6d5da9170..98de438544 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.2.3 + 4.3.0-beta4 ${mongo} 1.19 From 42ab7d2f6378488d8f23be6f056748c9eb406f19 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 13 Jul 2021 08:27:51 +0200 Subject: [PATCH 008/885] Adapt to changes in AssertJ 3.20. Closes #3705 --- .../mongodb/test/util/DocumentAssert.java | 49 +++++-------------- 1 file changed, 11 insertions(+), 38 deletions(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java index db33b29b82..6f519c71e2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java @@ -134,12 +134,12 @@ public DocumentAssert containsKey(String key) { return containsKeys(key); } - /* + /* * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#containsKeys(java.lang.Object[]) + * @see org.assertj.core.api.AbstractMapAssert#containsKeysForProxy(java.lang.Object[]) */ @Override - public final DocumentAssert containsKeys(String... keys) { + protected DocumentAssert containsKeysForProxy(String[] keys) { Set notFound = new LinkedHashSet<>(); @@ -166,12 +166,12 @@ public DocumentAssert doesNotContainKey(String key) { return doesNotContainKeys(key); } - /* + /* * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContainKeys(java.lang.Object[]) + * @see org.assertj.core.api.AbstractMapAssert#doesNotContainKeysForProxy(java.lang.Object[]) */ @Override - public final DocumentAssert doesNotContainKeys(String... 
keys) { + protected DocumentAssert doesNotContainKeysForProxy(String[] keys) { Set found = new LinkedHashSet<>(); for (String key : keys) { @@ -191,13 +191,8 @@ public final DocumentAssert doesNotContainKeys(String... keys) { // used in soft assertions which need to be able to proxy method - @SafeVarargs requiring method to be final prevents // using proxies. - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#contains(java.util.Map.Entry[]) - */ - @SafeVarargs @Override - public final DocumentAssert contains(Map.Entry... entries) { + protected DocumentAssert containsForProxy(Entry[] entries) { // if both actual and values are empty, then assertion passes. if (actual.isEmpty() && entries.length == 0) { @@ -216,14 +211,8 @@ public final DocumentAssert contains(Map.Entry... entries) { - + protected DocumentAssert containsAnyOfForProxy(Entry[] entries) { for (Map.Entry entry : entries) { if (containsEntry(entry)) { return myself; @@ -233,24 +222,13 @@ public final DocumentAssert containsAnyOf(Map.Entry... entries) { + protected DocumentAssert containsOnlyForProxy(Entry[] entries) { throw new UnsupportedOperationException(); } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContain(java.util.Map.Entry[]) - */ - @SafeVarargs @Override - public final DocumentAssert doesNotContain(Map.Entry... entries) { - + protected DocumentAssert doesNotContainForProxy(Entry[] entries) { Set> found = new LinkedHashSet<>(); for (Map.Entry entry : entries) { @@ -265,13 +243,8 @@ public final DocumentAssert doesNotContain(Map.Entry... entries) { + protected DocumentAssert containsExactlyForProxy(Entry[] entries) { throw new UnsupportedOperationException(); } From 93b9f23b077cd60c43b22aa66b7431c2500ccf6a Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 13 Jul 2021 08:42:24 +0200 Subject: [PATCH 009/885] Polishing. Fix proxy comparison. See #3705 --- .../data/mongodb/core/convert/LazyLoadingProxyFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java index f77b96c71f..887ddfe78d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java @@ -167,7 +167,7 @@ public Object intercept(Object o, Method method, Object[] args, MethodProxy prox } if (ReflectionUtils.isEqualsMethod(method)) { - return proxyEquals(proxy, args[0]); + return proxyEquals(o, args[0]); } if (ReflectionUtils.isHashCodeMethod(method)) { From 5bd7ff141355e98843a0eca990a6018458dc6557 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 14 Jul 2021 07:44:00 +0200 Subject: [PATCH 010/885] Upgrade to MongoDB 4.3.0 Drivers. Closes: #3706 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 98de438544..b688f3ee50 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.3.0-beta4 + 4.3.0 ${mongo} 1.19 From 986ea39f902f65653c77ab32153c97ff8f7efb7e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 17 Jun 2021 10:50:13 +0200 Subject: [PATCH 011/885] Upgrade to Querydsl 5.0. Move off our own Querydsl copies, as Querydsl 5.0 ships MongoDB Document API support. 
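For repository users the upgrade is transparent; predicates are now simply serialized through Querydsl's own Document-based infrastructure. A minimal sketch of the unchanged surface API (illustrative only, assuming a generated `QPerson` query type and the existing `Person` test domain class):

```java
import org.springframework.data.querydsl.QuerydslPredicateExecutor;
import org.springframework.data.repository.CrudRepository;

// Standard Querydsl mix-in; nothing here changes with the Querydsl 5.0 upgrade.
interface PersonRepository extends CrudRepository<Person, String>, QuerydslPredicateExecutor<Person> {
}

// elsewhere:
// Iterable<Person> matthews = repository.findAll(QPerson.person.lastname.eq("Matthews"));
```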
Remove package-private duplicates of Querydsl code. Introduce SpringDataMongodbQuerySupport to provide a well-formatted toString representation of the actual query. Original Pull Request: #3674 --- .../support/MongodbDocumentSerializer.java | 459 ------------------ .../support/QuerydslAbstractMongodbQuery.java | 32 +- .../support/QuerydslAnyEmbeddedBuilder.java | 3 + .../QuerydslFetchableMongodbQuery.java | 272 ----------- .../support/QuerydslJoinBuilder.java | 67 --- .../repository/support/QuerydslMongoOps.java | 43 -- .../ReactiveSpringDataMongodbQuery.java | 79 ++- .../support/SpringDataMongodbQuery.java | 199 +++++++- .../SpringDataMongodbQuerySupport.java | 147 ++++++ .../support/SpringDataMongodbSerializer.java | 2 + .../SimpleReactiveMongoRepositoryTests.java | 3 +- 11 files changed, 383 insertions(+), 923 deletions(-) delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java deleted file mode 100644 index 3f0d281cc4..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java +++ /dev/null @@ -1,459 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Queue; -import java.util.Set; -import java.util.regex.Pattern; - -import org.bson.BsonJavaScript; -import org.bson.BsonRegularExpression; -import org.bson.Document; -import org.bson.types.ObjectId; -import org.springframework.lang.Nullable; -import org.springframework.util.Assert; - -import com.mongodb.DBRef; -import com.querydsl.core.types.*; -import com.querydsl.mongodb.MongodbOps; - -/** - *

- * Serializes the given Querydsl query to a Document query for MongoDB.
- * <p>
- * Original implementation source {@link com.querydsl.mongodb.MongodbSerializer} by {@literal The Querydsl Team}
- * (http://www.querydsl.com/team) licensed under the Apache License, Version
- * 2.0.
- * <p>
- * Modified to use {@link Document} instead of {@link com.mongodb.DBObject}, updated nullable types and code format. Use - * Bson specific types and add {@link QuerydslMongoOps#NO_MATCH}. - * - * @author laimw - * @author Mark Paluch - * @author Christoph Strobl - * @author Mikhail Kaduchka - * @author Enrique Leon Molina - * @since 2.1 - */ -abstract class MongodbDocumentSerializer implements Visitor { - - @Nullable - Object handle(Expression expression) { - return expression.accept(this, null); - } - - /** - * Create the MongoDB specific query document. - * - * @param predicate must not be {@literal null}. - * @return empty {@link Document} by default. - */ - Document toQuery(Predicate predicate) { - - Object value = handle(predicate); - - if (value == null) { - return new Document(); - } - - Assert.isInstanceOf(Document.class, value, - () -> String.format("Invalid type. Expected Document but found %s", value.getClass())); - - return (Document) value; - } - - /** - * Create the MongoDB specific sort document. - * - * @param orderBys must not be {@literal null}. - * @return empty {@link Document} by default. - */ - Document toSort(List> orderBys) { - - Document sort = new Document(); - - orderBys.forEach(orderSpecifier -> { - - Object key = orderSpecifier.getTarget().accept(this, null); - - Assert.notNull(key, () -> String.format("Mapped sort key for %s must not be null!", orderSpecifier)); - sort.append(key.toString(), orderSpecifier.getOrder() == Order.ASC ? 1 : -1); - }); - - return sort; - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.Constant, java.lang.Void) - */ - @Override - public Object visit(Constant expr, Void context) { - - if (!Enum.class.isAssignableFrom(expr.getType())) { - return expr.getConstant(); - } - - @SuppressWarnings("unchecked") // Guarded by previous check - Constant> expectedExpr = (Constant>) expr; - return expectedExpr.getConstant().name(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.TemplateExpression, java.lang.Void) - */ - @Override - public Object visit(TemplateExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.FactoryExpression, java.lang.Void) - */ - @Override - public Object visit(FactoryExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - protected String asDBKey(Operation expr, int index) { - - String key = (String) asDBValue(expr, index); - - Assert.hasText(key, () -> String.format("Mapped key must not be null nor empty for expression %s.", expr)); - return key; - } - - @Nullable - protected Object asDBValue(Operation expr, int index) { - return expr.getArg(index).accept(this, null); - } - - private String regexValue(Operation expr, int index) { - - Object value = expr.getArg(index).accept(this, null); - - Assert.notNull(value, () -> String.format("Regex for %s must not be null.", expr)); - return Pattern.quote(value.toString()); - } - - protected Document asDocument(String key, @Nullable Object value) { - return new Document(key, value); - } - - @SuppressWarnings("unchecked") - @Override - public Object visit(Operation expr, Void context) { - - Operator op = expr.getOperator(); - if (op == Ops.EQ) { - - if (expr.getArg(0) instanceof Operation) { - Operation lhs = (Operation) expr.getArg(0); - if (lhs.getOperator() == Ops.COL_SIZE || lhs.getOperator() == Ops.ARRAY_SIZE) { - return 
asDocument(asDBKey(lhs, 0), asDocument("$size", asDBValue(expr, 1))); - } else { - throw new UnsupportedOperationException("Illegal operation " + expr); - } - } else if (expr.getArg(0) instanceof Path) { - Path path = (Path) expr.getArg(0); - Constant constant = (Constant) expr.getArg(1); - return asDocument(asDBKey(expr, 0), convert(path, constant)); - } - } else if (op == Ops.STRING_IS_EMPTY) { - return asDocument(asDBKey(expr, 0), ""); - } else if (op == Ops.AND) { - - Queue> pendingDocuments = collectConnectorArgs("$and", expr); - List> unmergeableDocuments = new ArrayList<>(); - List> generatedDocuments = new ArrayList<>(); - - while (!pendingDocuments.isEmpty()) { - - Map lhs = pendingDocuments.poll(); - - for (Map rhs : pendingDocuments) { - Set lhs2 = new LinkedHashSet<>(lhs.keySet()); - lhs2.retainAll(rhs.keySet()); - if (lhs2.isEmpty()) { - lhs.putAll(rhs); - } else { - unmergeableDocuments.add(rhs); - } - } - - generatedDocuments.add(lhs); - pendingDocuments = new LinkedList<>(unmergeableDocuments); - unmergeableDocuments = new LinkedList<>(); - } - - return generatedDocuments.size() == 1 ? generatedDocuments.get(0) : asDocument("$and", generatedDocuments); - } else if (op == Ops.NOT) { - // Handle the not's child - Operation subOperation = (Operation) expr.getArg(0); - Operator subOp = subOperation.getOperator(); - if (subOp == Ops.IN) { - return visit( - ExpressionUtils.operation(Boolean.class, Ops.NOT_IN, subOperation.getArg(0), subOperation.getArg(1)), - context); - } else { - Document arg = (Document) handle(expr.getArg(0)); - return negate(arg); - } - - } else if (op == Ops.OR) { - return asDocument("$or", collectConnectorArgs("$or", expr)); - } else if (op == Ops.NE) { - - Path path = (Path) expr.getArg(0); - Constant constant = (Constant) expr.getArg(1); - return asDocument(asDBKey(expr, 0), asDocument("$ne", convert(path, constant))); - - } else if (op == Ops.STARTS_WITH) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1))); - } else if (op == Ops.STARTS_WITH_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1), "i")); - } else if (op == Ops.ENDS_WITH) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regexValue(expr, 1) + "$")); - } else if (op == Ops.ENDS_WITH_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regexValue(expr, 1) + "$", "i")); - } else if (op == Ops.EQ_IGNORE_CASE) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1) + "$", "i")); - } else if (op == Ops.STRING_CONTAINS) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(".*" + regexValue(expr, 1) + ".*")); - } else if (op == Ops.STRING_CONTAINS_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(".*" + regexValue(expr, 1) + ".*", "i")); - } else if (op == Ops.MATCHES) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(asDBValue(expr, 1).toString())); - } else if (op == Ops.MATCHES_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(asDBValue(expr, 1).toString(), "i")); - } else if (op == Ops.LIKE) { - - String regex = ExpressionUtils.likeToRegex((Expression) expr.getArg(1)).toString(); - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regex)); - } else if (op == Ops.BETWEEN) { - - Document value = new Document("$gte", asDBValue(expr, 1)); - value.append("$lte", asDBValue(expr, 2)); - return asDocument(asDBKey(expr, 0), value); - } else if (op == Ops.IN) { - - int 
constIndex = 0; - int exprIndex = 1; - if (expr.getArg(1) instanceof Constant) { - constIndex = 1; - exprIndex = 0; - } - if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) { - @SuppressWarnings("unchecked") // guarded by previous check - Collection values = ((Constant>) expr.getArg(constIndex)).getConstant(); - return asDocument(asDBKey(expr, exprIndex), asDocument("$in", values)); - } else { - Path path = (Path) expr.getArg(exprIndex); - Constant constant = (Constant) expr.getArg(constIndex); - return asDocument(asDBKey(expr, exprIndex), convert(path, constant)); - } - } else if (op == Ops.NOT_IN) { - - int constIndex = 0; - int exprIndex = 1; - if (expr.getArg(1) instanceof Constant) { - - constIndex = 1; - exprIndex = 0; - } - if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) { - - @SuppressWarnings("unchecked") // guarded by previous check - Collection values = ((Constant>) expr.getArg(constIndex)).getConstant(); - return asDocument(asDBKey(expr, exprIndex), asDocument("$nin", values)); - } else { - - Path path = (Path) expr.getArg(exprIndex); - Constant constant = (Constant) expr.getArg(constIndex); - return asDocument(asDBKey(expr, exprIndex), asDocument("$ne", convert(path, constant))); - } - } else if (op == Ops.COL_IS_EMPTY) { - - List list = new ArrayList<>(2); - list.add(asDocument(asDBKey(expr, 0), new ArrayList())); - list.add(asDocument(asDBKey(expr, 0), asDocument("$exists", false))); - return asDocument("$or", list); - } else if (op == Ops.LT) { - return asDocument(asDBKey(expr, 0), asDocument("$lt", asDBValue(expr, 1))); - } else if (op == Ops.GT) { - return asDocument(asDBKey(expr, 0), asDocument("$gt", asDBValue(expr, 1))); - } else if (op == Ops.LOE) { - return asDocument(asDBKey(expr, 0), asDocument("$lte", asDBValue(expr, 1))); - } else if (op == Ops.GOE) { - return asDocument(asDBKey(expr, 0), asDocument("$gte", asDBValue(expr, 1))); - } else if (op == Ops.IS_NULL) { - return asDocument(asDBKey(expr, 0), asDocument("$exists", false)); - } else if (op == Ops.IS_NOT_NULL) { - return asDocument(asDBKey(expr, 0), asDocument("$exists", true)); - } else if (op == Ops.CONTAINS_KEY) { - - Path path = (Path) expr.getArg(0); - Expression key = expr.getArg(1); - return asDocument(visit(path, context) + "." 
+ key.toString(), asDocument("$exists", true)); - } else if (op == MongodbOps.NEAR) { - return asDocument(asDBKey(expr, 0), asDocument("$near", asDBValue(expr, 1))); - } else if (op == MongodbOps.NEAR_SPHERE) { - return asDocument(asDBKey(expr, 0), asDocument("$nearSphere", asDBValue(expr, 1))); - } else if (op == MongodbOps.ELEM_MATCH) { - return asDocument(asDBKey(expr, 0), asDocument("$elemMatch", asDBValue(expr, 1))); - } else if (op == QuerydslMongoOps.NO_MATCH) { - return new Document("$where", new BsonJavaScript("function() { return false }")); - } - - throw new UnsupportedOperationException("Illegal operation " + expr); - } - - private Object negate(Document arg) { - - List list = new ArrayList<>(); - for (Map.Entry entry : arg.entrySet()) { - - if (entry.getKey().equals("$or")) { - list.add(asDocument("$nor", entry.getValue())); - } else if (entry.getKey().equals("$and")) { - - List list2 = new ArrayList<>(); - for (Object o : ((Collection) entry.getValue())) { - list2.add(negate((Document) o)); - } - list.add(asDocument("$or", list2)); - } else if (entry.getValue() instanceof Pattern || entry.getValue() instanceof BsonRegularExpression) { - list.add(asDocument(entry.getKey(), asDocument("$not", entry.getValue()))); - } else if (entry.getValue() instanceof Document) { - list.add(negate(entry.getKey(), (Document) entry.getValue())); - } else { - list.add(asDocument(entry.getKey(), asDocument("$ne", entry.getValue()))); - } - } - return list.size() == 1 ? list.get(0) : asDocument("$or", list); - } - - private Object negate(String key, Document value) { - - if (value.size() == 1) { - return asDocument(key, asDocument("$not", value)); - } else { - - List list2 = new ArrayList<>(); - for (Map.Entry entry2 : value.entrySet()) { - list2.add(asDocument(key, asDocument("$not", asDocument(entry2.getKey(), entry2.getValue())))); - } - - return asDocument("$or", list2); - } - } - - protected Object convert(Path property, Constant constant) { - - if (isReference(property)) { - return asReference(constant.getConstant()); - } else if (isId(property)) { - - if (isReference(property.getMetadata().getParent())) { - return asReferenceKey(property.getMetadata().getParent().getType(), constant.getConstant()); - } else if (constant.getType().equals(String.class) && isImplicitObjectIdConversion()) { - - String id = (String) constant.getConstant(); - return ObjectId.isValid(id) ? new ObjectId(id) : id; - } - } - return visit(constant, null); - } - - protected boolean isImplicitObjectIdConversion() { - return true; - } - - protected DBRef asReferenceKey(Class entity, Object id) { - // TODO override in subclass - throw new UnsupportedOperationException(); - } - - protected abstract DBRef asReference(Object constant); - - protected abstract boolean isReference(@Nullable Path arg); - - protected boolean isId(Path arg) { - // TODO override in subclass - return false; - } - - @Override - public String visit(Path expr, Void context) { - - PathMetadata metadata = expr.getMetadata(); - - if (metadata.getParent() != null) { - - Path parent = metadata.getParent(); - if (parent.getMetadata().getPathType() == PathType.DELEGATE) { - parent = parent.getMetadata().getParent(); - } - if (metadata.getPathType() == PathType.COLLECTION_ANY) { - return visit(parent, context); - } else if (parent.getMetadata().getPathType() != PathType.VARIABLE) { - - String rv = getKeyForPath(expr, metadata); - String parentStr = visit(parent, context); - return rv != null ? parentStr + "." 
+ rv : parentStr; - } - } - return getKeyForPath(expr, metadata); - } - - protected String getKeyForPath(Path expr, PathMetadata metadata) { - return metadata.getElement().toString(); - } - - @Override - public Object visit(SubQueryExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - @Override - public Object visit(ParamExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - private LinkedList> collectConnectorArgs(String operator, Operation operation) { - - LinkedList> pendingDocuments = new LinkedList<>(); - for (Expression exp : operation.getArgs()) { - Map document = (Map) handle(exp); - if (document.keySet().size() == 1 && document.containsKey(operator)) { - pendingDocuments.addAll((Collection>) document.get(operator)); - } else { - pendingDocuments.add(document); - } - } - return pendingDocuments; - - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java index 422eea5778..b255d20273 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java @@ -34,6 +34,8 @@ import com.querydsl.core.types.OrderSpecifier; import com.querydsl.core.types.ParamExpression; import com.querydsl.core.types.Predicate; +import com.querydsl.mongodb.document.AbstractMongodbQuery; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * {@code QuerydslAbstractMongodbQuery} provides a base class for general Querydsl query implementation. @@ -49,8 +51,12 @@ * @author Mark Paluch * @author Christoph Strobl * @since 2.1 + * @deprecated since 3.3, use Querydsl's {@link AbstractMongodbQuery} directly. This class is deprecated for removal + * with the next major release. */ +@Deprecated public abstract class QuerydslAbstractMongodbQuery> + extends AbstractMongodbQuery implements SimpleQuery { private static final JsonWriterSettings JSON_WRITER_SETTINGS = JsonWriterSettings.builder().outputMode(JsonMode.SHELL) @@ -67,6 +73,8 @@ public abstract class QuerydslAbstractMongodbQuery((Q) this, new DefaultQueryMetadata(), false); this.serializer = serializer; } @@ -158,22 +166,6 @@ protected Document createProjection(@Nullable Expression projectionExpression return projection; } - /** - * Compute the filer {@link Document} from the given {@link Predicate}. - * - * @param predicate can be {@literal null}. - * @return an empty {@link Document} if predicate is {@literal null}. - * @see MongodbDocumentSerializer#toQuery(Predicate) - */ - protected Document createQuery(@Nullable Predicate predicate) { - - if (predicate == null) { - return new Document(); - } - - return serializer.toQuery(predicate); - } - /** * Compute the sort {@link Document} from the given list of {@link OrderSpecifier order specifiers}. * @@ -194,14 +186,6 @@ QueryMixin getQueryMixin() { return queryMixin; } - /** - * Get the where definition as a Document instance - * - * @return - */ - Document asDocument() { - return createQuery(queryMixin.getMetadata().getWhere()); - } /** * Returns the {@literal Mongo Shell} representation of the query.
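The {@literal Mongo Shell} representation mentioned just above is exposed through the query's `toString()`, which `SpringDataMongodbQuerySupport` keeps providing after the rework. A rough usage sketch (illustrative only; it assumes a generated `QPerson` type, the existing `Person` domain class and the public `SpringDataMongodbQuery(MongoOperations, Class)` constructor, and the exact shell rendering may differ):

```java
import java.util.List;

import org.springframework.data.mongodb.core.MongoOperations;
import org.springframework.data.mongodb.repository.support.SpringDataMongodbQuery;

class QuerydslQuerySample {

	List<Person> findMatthews(MongoOperations operations) {

		SpringDataMongodbQuery<Person> query = new SpringDataMongodbQuery<Person>(operations, Person.class)
				.where(QPerson.person.lastname.eq("Matthews"))
				.orderBy(QPerson.person.firstname.asc());

		// toString() renders the query in shell syntax, roughly: find({ "lastname" : "Matthews" })
		System.out.println(query);

		return query.fetch(); // Fetchable contract: fetch(), fetchFirst(), fetchOne(), fetchCount()
	}
}
```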
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java index 3ba84a2dd3..b6935a5e8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java @@ -37,7 +37,10 @@ * @author Mark Paluch * @author Christoph Strobl * @since 2.1 + * @deprecated since 3.3, use Querydsl's {@link com.querydsl.mongodb.document.AnyEmbeddedBuilder} directly. This class + * is deprecated for removal with the next major release. */ +@Deprecated public class QuerydslAnyEmbeddedBuilder, K> { private final QueryMixin queryMixin; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java deleted file mode 100644 index 36057d58c9..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java +++ /dev/null @@ -1,272 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import java.util.Collection; -import java.util.Collections; -import java.util.List; - -import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithProjection; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.query.BasicQuery; -import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; -import org.springframework.util.LinkedMultiValueMap; - -import com.mysema.commons.lang.CloseableIterator; -import com.querydsl.core.Fetchable; -import com.querydsl.core.JoinExpression; -import com.querydsl.core.QueryMetadata; -import com.querydsl.core.QueryModifiers; -import com.querydsl.core.QueryResults; -import com.querydsl.core.types.Expression; -import com.querydsl.core.types.ExpressionUtils; -import com.querydsl.core.types.Operation; -import com.querydsl.core.types.OrderSpecifier; -import com.querydsl.core.types.Path; -import com.querydsl.core.types.Predicate; -import com.querydsl.core.types.dsl.CollectionPathBase; - -/** - * {@link Fetchable} MongoDB query with utilizing {@link MongoOperations} for command execution. 
- * - * @param result type - * @param concrete subtype - * @author Mark Paluch - * @author Christoph Strobl - * @since 2.1 - */ -abstract class QuerydslFetchableMongodbQuery> - extends QuerydslAbstractMongodbQuery implements Fetchable { - - private final Class entityClass; - private final String collection; - private final MongoOperations mongoOperations; - private final FindWithProjection find; - - QuerydslFetchableMongodbQuery(MongodbDocumentSerializer serializer, Class entityClass, String collection, - MongoOperations mongoOperations) { - - super(serializer); - - this.entityClass = (Class) entityClass; - this.collection = collection; - this.mongoOperations = mongoOperations; - find = mongoOperations.query(this.entityClass).inCollection(collection); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#iterable() - */ - @Override - public CloseableIterator iterate() { - - org.springframework.data.util.CloseableIterator stream = mongoOperations.stream(createQuery(), - entityClass, collection); - - return new CloseableIterator() { - - @Override - public boolean hasNext() { - return stream.hasNext(); - } - - @Override - public K next() { - return stream.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Cannot remove from iterator while streaming data."); - } - - @Override - public void close() { - stream.close(); - } - }; - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetch() - */ - @Override - public List fetch() { - return find.matching(createQuery()).all(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchFirst() - */ - @Override - public K fetchFirst() { - return find.matching(createQuery()).firstValue(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchOne() - */ - @Override - public K fetchOne() { - return find.matching(createQuery()).oneValue(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchResults() - */ - @Override - public QueryResults fetchResults() { - - long total = fetchCount(); - return total > 0L ? new QueryResults<>(fetch(), getQueryMixin().getMetadata().getModifiers(), total) - : QueryResults.emptyResults(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchCount() - */ - @Override - public long fetchCount() { - return find.matching(Query.of(createQuery()).skip(-1).limit(-1)).count(); - } - - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - public QuerydslJoinBuilder join(Path ref, Path target) { - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - public QuerydslJoinBuilder join(CollectionPathBase ref, Path target) { - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a constraint for an embedded object. - * - * @param collection collection must not be {@literal null}. - * @param target target must not be {@literal null}. - * @return new instance of {@link QuerydslAnyEmbeddedBuilder}. 
- */ - public QuerydslAnyEmbeddedBuilder anyEmbedded(Path> collection, Path target) { - return new QuerydslAnyEmbeddedBuilder<>(getQueryMixin(), collection); - } - - protected org.springframework.data.mongodb.core.query.Query createQuery() { - - QueryMetadata metadata = getQueryMixin().getMetadata(); - - return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), - metadata.getOrderBy()); - } - - protected org.springframework.data.mongodb.core.query.Query createQuery(@Nullable Predicate filter, - @Nullable Expression projection, QueryModifiers modifiers, List> orderBy) { - - BasicQuery basicQuery = new BasicQuery(createQuery(filter), createProjection(projection)); - - Integer limit = modifiers.getLimitAsInteger(); - Integer offset = modifiers.getOffsetAsInteger(); - - if (limit != null) { - basicQuery.limit(limit); - } - if (offset != null) { - basicQuery.skip(offset); - } - if (orderBy.size() > 0) { - basicQuery.setSortObject(createSort(orderBy)); - } - - return basicQuery; - } - - @Nullable - protected Predicate createFilter(QueryMetadata metadata) { - - Predicate filter; - if (!metadata.getJoins().isEmpty()) { - filter = ExpressionUtils.allOf(metadata.getWhere(), createJoinFilter(metadata)); - } else { - filter = metadata.getWhere(); - } - return filter; - } - - @SuppressWarnings("unchecked") - @Nullable - protected Predicate createJoinFilter(QueryMetadata metadata) { - - LinkedMultiValueMap, Predicate> predicates = new LinkedMultiValueMap<>(); - List joins = metadata.getJoins(); - - for (int i = joins.size() - 1; i >= 0; i--) { - - JoinExpression join = joins.get(i); - Path source = (Path) ((Operation) join.getTarget()).getArg(0); - Path target = (Path) ((Operation) join.getTarget()).getArg(1); - Collection extraFilters = predicates.get(target.getRoot()); - Predicate filter = ExpressionUtils.allOf(join.getCondition(), allOf(extraFilters)); - - List ids = getIds(target.getType(), filter); - - if (ids.isEmpty()) { - return ExpressionUtils.predicate(QuerydslMongoOps.NO_MATCH, source); - } - - Path path = ExpressionUtils.path(String.class, source, "$id"); - predicates.add(source.getRoot(), ExpressionUtils.in((Path) path, ids)); - } - - Path source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0); - return allOf(predicates.get(source.getRoot())); - } - - private Predicate allOf(Collection predicates) { - return predicates != null ? ExpressionUtils.allOf(predicates) : null; - } - - /** - * Fetch the list of ids matching a given condition. - * - * @param targetType must not be {@literal null}. - * @param condition must not be {@literal null}. - * @return empty {@link List} if none found. - */ - protected List getIds(Class targetType, Predicate condition) { - - Query query = createQuery(condition, null, QueryModifiers.EMPTY, Collections.emptyList()); - return mongoOperations.findDistinct(query, "_id", targetType, Object.class); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java deleted file mode 100644 index 344ad08826..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import com.querydsl.core.JoinType; -import com.querydsl.core.support.QueryMixin; -import com.querydsl.core.types.ExpressionUtils; -import com.querydsl.core.types.Path; -import com.querydsl.core.types.Predicate; - -/** - * {@code QuerydslJoinBuilder} is a builder for join constraints. - *

- * Original implementation source {@link com.querydsl.mongodb.JoinBuilder} by {@literal The Querydsl Team} - * (http://www.querydsl.com/team) licensed under the Apache License, Version - * 2.0. - *

- * Modified for usage with {@link QuerydslAbstractMongodbQuery}. - * - * @param - * @param - * @author tiwe - * @author Mark Paluch - * @author Christoph Strobl - * @since 2.1 - */ -public class QuerydslJoinBuilder, K, T> { - - private final QueryMixin queryMixin; - private final Path ref; - private final Path target; - - QuerydslJoinBuilder(QueryMixin queryMixin, Path ref, Path target) { - - this.queryMixin = queryMixin; - this.ref = ref; - this.target = target; - } - - /** - * Add the given join conditions. - * - * @param conditions must not be {@literal null}. - * @return the target {@link QueryMixin}. - * @see QueryMixin#on(Predicate) - */ - @SuppressWarnings("unchecked") - public Q on(Predicate... conditions) { - - queryMixin.addJoin(JoinType.JOIN, ExpressionUtils.as((Path) ref, target)); - queryMixin.on(conditions); - return queryMixin.getSelf(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java deleted file mode 100644 index 0c695afd0c..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import com.querydsl.core.types.Operator; - -/** - * Spring Data specific {@link Operator operators} for usage with Querydsl and MongoDB. - * - * @author Christoph Strobl - * @since 2.1 - */ -enum QuerydslMongoOps implements Operator { - - /** - * {@link Operator} always evaluating to {@literal false}. 
- */ - NO_MATCH(Boolean.class); - - private final Class type; - - QuerydslMongoOps(Class type) { - this.type = type; - } - - @Override - public Class getType() { - return type; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java index 4162a79482..8b30e585e6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java @@ -22,6 +22,9 @@ import java.util.Collections; import java.util.List; +import org.bson.Document; + +import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.query.BasicQuery; @@ -40,19 +43,21 @@ import com.querydsl.core.types.OrderSpecifier; import com.querydsl.core.types.Path; import com.querydsl.core.types.Predicate; -import com.querydsl.core.types.dsl.CollectionPathBase; +import com.querydsl.mongodb.MongodbOps; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * MongoDB query with utilizing {@link ReactiveMongoOperations} for command execution. * + * @implNote This class uses {@link MongoOperations} to directly convert documents into the target entity type. Also, we + * want entites to participate in lifecycle events and entity callbacks. * @param result type * @author Mark Paluch * @author Christoph Strobl * @since 2.2 */ -class ReactiveSpringDataMongodbQuery extends QuerydslAbstractMongodbQuery> { +class ReactiveSpringDataMongodbQuery extends SpringDataMongodbQuerySupport> { - private final Class entityClass; private final ReactiveMongoOperations mongoOperations; private final FindWithProjection find; @@ -60,15 +65,15 @@ class ReactiveSpringDataMongodbQuery extends QuerydslAbstractMongodbQuery entityClass, @Nullable String collection) { super(serializer); - this.entityClass = (Class) entityClass; this.mongoOperations = mongoOperations; - this.find = StringUtils.hasText(collection) ? mongoOperations.query(this.entityClass).inCollection(collection) - : mongoOperations.query(this.entityClass); + this.find = StringUtils.hasText(collection) ? mongoOperations.query((Class) entityClass).inCollection(collection) + : mongoOperations.query((Class) entityClass); } /** @@ -99,48 +104,11 @@ Mono fetchCount() { return createQuery().flatMap(it -> find.matching(it).count()); } - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - QuerydslJoinBuilder, K, T> join(Path ref, Path target) { - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - QuerydslJoinBuilder, K, T> join(CollectionPathBase ref, - Path target) { - - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a constraint for an embedded object. - * - * @param collection collection must not be {@literal null}. - * @param target target must not be {@literal null}. 
- * @return new instance of {@link QuerydslAnyEmbeddedBuilder}. - */ - QuerydslAnyEmbeddedBuilder, K> anyEmbedded( - Path> collection, Path target) { - - return new QuerydslAnyEmbeddedBuilder<>(getQueryMixin(), collection); - } - protected Mono createQuery() { QueryMetadata metadata = getQueryMixin().getMetadata(); - return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), + return createQuery(createReactiveFilter(metadata), metadata.getProjection(), metadata.getModifiers(), metadata.getOrderBy()); } @@ -160,7 +128,8 @@ protected Mono createQuery(Mono filter, @Nullable Expression { - BasicQuery basicQuery = new BasicQuery(it, createProjection(projection)); + Document fields = createProjection(projection); + BasicQuery basicQuery = new BasicQuery(it, fields == null ? new Document() : fields); Integer limit = modifiers.getLimitAsInteger(); Integer offset = modifiers.getOffsetAsInteger(); @@ -179,11 +148,11 @@ protected Mono createQuery(Mono filter, @Nullable Expression createFilter(QueryMetadata metadata) { + protected Mono createReactiveFilter(QueryMetadata metadata) { if (!metadata.getJoins().isEmpty()) { - return createJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it)) + return createReactiveJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it)) .switchIfEmpty(Mono.justOrEmpty(metadata.getWhere())); } @@ -197,7 +166,7 @@ protected Mono createFilter(QueryMetadata metadata) { * @return */ @SuppressWarnings("unchecked") - protected Mono createJoinFilter(QueryMetadata metadata) { + protected Mono createReactiveJoinFilter(QueryMetadata metadata) { MultiValueMap, Mono> predicates = new LinkedMultiValueMap<>(); List joins = metadata.getJoins(); @@ -230,7 +199,7 @@ protected Mono createJoinFilter(QueryMetadata metadata) { Path source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0); return allOf(predicates.get(source.getRoot())).onErrorResume(NoMatchException.class, - e -> Mono.just(ExpressionUtils.predicate(QuerydslMongoOps.NO_MATCH, e.source))); + e -> Mono.just(ExpressionUtils.predicate(MongodbOps.NO_MATCH, e.source))); } private Mono allOf(@Nullable Collection> predicates) { @@ -246,8 +215,8 @@ private Mono allOf(@Nullable Collection> predicates) */ protected Flux getIds(Class targetType, Mono condition) { - return condition.flatMapMany(it -> getIds(targetType, it)) - .switchIfEmpty(Flux.defer(() -> getIds(targetType, (Predicate) null))); + return condition.flatMapMany(it -> getJoinIds(targetType, it)) + .switchIfEmpty(Flux.defer(() -> getJoinIds(targetType, (Predicate) null))); } /** @@ -257,12 +226,18 @@ protected Flux getIds(Class targetType, Mono condition) { * @param condition must not be {@literal null}. * @return empty {@link List} if none found. */ - protected Flux getIds(Class targetType, @Nullable Predicate condition) { + protected Flux getJoinIds(Class targetType, @Nullable Predicate condition) { return createQuery(Mono.justOrEmpty(condition), null, QueryModifiers.EMPTY, Collections.emptyList()) .flatMapMany(query -> mongoOperations.findDistinct(query, "_id", targetType, Object.class)); } + @Override + protected List getIds(Class aClass, Predicate predicate) { + throw new UnsupportedOperationException( + "Use create Flux getIds(Class targetType, Mono condition)"); + } + /** * Marker exception to indicate no matches for a query using reference Id's. 
*/ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java index 8a153d0c2c..d62aa99c5e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java @@ -15,7 +15,27 @@ */ package org.springframework.data.mongodb.repository.support; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; + +import org.springframework.data.mongodb.core.ExecutableFindOperation; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; + +import com.mysema.commons.lang.CloseableIterator; +import com.mysema.commons.lang.EmptyCloseableIterator; +import com.querydsl.core.Fetchable; +import com.querydsl.core.QueryMetadata; +import com.querydsl.core.QueryModifiers; +import com.querydsl.core.QueryResults; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Predicate; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * Spring Data specific simple {@link com.querydsl.core.Fetchable} {@link com.querydsl.core.SimpleQuery Query} @@ -25,7 +45,13 @@ * @author Mark Paluch * @author Christoph Strobl */ -public class SpringDataMongodbQuery extends QuerydslFetchableMongodbQuery> { +public class SpringDataMongodbQuery extends SpringDataMongodbQuerySupport> + implements Fetchable { + + private final Class entityClass; + private final String collection; + private final MongoOperations mongoOperations; + private final ExecutableFindOperation.FindWithProjection find; /** * Creates a new {@link SpringDataMongodbQuery}. 
@@ -33,7 +59,7 @@ public class SpringDataMongodbQuery extends QuerydslFetchableMongodbQuery type) { + public SpringDataMongodbQuery(MongoOperations operations, Class type) { this(operations, type, operations.getCollectionName(type)); } @@ -44,9 +70,174 @@ public SpringDataMongodbQuery(final MongoOperations operations, final Class type, + public SpringDataMongodbQuery(MongoOperations operations, Class type, String collectionName) { + this(new SpringDataMongodbSerializer(operations.getConverter()), operations, type, collectionName); + } + + private SpringDataMongodbQuery(MongodbDocumentSerializer serializer, MongoOperations operations, + Class type, String collectionName) { + + super(serializer); - super(new SpringDataMongodbSerializer(operations.getConverter()), type, collectionName, operations); + this.entityClass = (Class) type; + this.collection = collectionName; + this.mongoOperations = operations; + this.find = mongoOperations.query(this.entityClass).inCollection(collection); } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#iterable() + */ + @Override + public CloseableIterator iterate() { + + try { + org.springframework.data.util.CloseableIterator stream = mongoOperations.stream(createQuery(), + entityClass, collection); + + return new CloseableIterator() { + + @Override + public boolean hasNext() { + return stream.hasNext(); + } + + @Override + public T next() { + return stream.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("Cannot remove from iterator while streaming data."); + } + + @Override + public void close() { + stream.close(); + } + }; + } catch (RuntimeException e) { + return handleException(e, new EmptyCloseableIterator<>()); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetch() + */ + @Override + public List fetch() { + try { + return find.matching(createQuery()).all(); + } catch (RuntimeException e) { + return handleException(e, Collections.emptyList()); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchFirst() + */ + @Override + public T fetchFirst() { + try { + return find.matching(createQuery()).firstValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchOne() + */ + @Override + public T fetchOne() { + try { + return find.matching(createQuery()).oneValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchResults() + */ + @Override + public QueryResults fetchResults() { + + long total = fetchCount(); + return total > 0L ? 
new QueryResults<>(fetch(), getQueryMixin().getMetadata().getModifiers(), total) + : QueryResults.emptyResults(); + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchCount() + */ + @Override + public long fetchCount() { + try { + return find.matching(Query.of(createQuery()).skip(-1).limit(-1)).count(); + } catch (RuntimeException e) { + return handleException(e, 0L); + } + } + + protected org.springframework.data.mongodb.core.query.Query createQuery() { + + QueryMetadata metadata = getQueryMixin().getMetadata(); + + return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), + metadata.getOrderBy()); + } + + protected org.springframework.data.mongodb.core.query.Query createQuery(@Nullable Predicate filter, + @Nullable Expression projection, QueryModifiers modifiers, List> orderBy) { + + Document fields = createProjection(projection); + BasicQuery basicQuery = new BasicQuery(createQuery(filter), fields == null ? new Document() : fields); + + Integer limit = modifiers.getLimitAsInteger(); + Integer offset = modifiers.getOffsetAsInteger(); + + if (limit != null) { + basicQuery.limit(limit); + } + if (offset != null) { + basicQuery.skip(offset); + } + if (orderBy.size() > 0) { + basicQuery.setSortObject(createSort(orderBy)); + } + + return basicQuery; + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. + * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. + */ + protected List getIds(Class targetType, Predicate condition) { + + Query query = createQuery(condition, null, QueryModifiers.EMPTY, Collections.emptyList()); + return mongoOperations.findDistinct(query, "_id", targetType, Object.class); + } + + private static T handleException(RuntimeException e, T defaultValue) { + + if (e.getClass().getName().endsWith("$NoResults")) { + return defaultValue; + } + + throw e; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java new file mode 100644 index 0000000000..406019cf4d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java @@ -0,0 +1,147 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.DocumentCodec; +import org.bson.json.JsonMode; +import org.bson.json.JsonWriterSettings; + +import org.springframework.beans.DirectFieldAccessor; + +import com.mongodb.MongoClientSettings; +import com.querydsl.core.support.QueryMixin; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.mongodb.document.AbstractMongodbQuery; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; + +/** + * Support query type to augment Spring Data-specific {@link #toString} representations and + * {@link org.springframework.data.domain.Sort} creation. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class SpringDataMongodbQuerySupport> + extends AbstractMongodbQuery { + + private final QueryMixin superQueryMixin; + + private static final JsonWriterSettings JSON_WRITER_SETTINGS = JsonWriterSettings.builder().outputMode(JsonMode.SHELL) + .build(); + + private final MongodbDocumentSerializer serializer; + + @SuppressWarnings("unchecked") + SpringDataMongodbQuerySupport(MongodbDocumentSerializer serializer) { + super(serializer); + this.serializer = serializer; + + DirectFieldAccessor fieldAccessor = new DirectFieldAccessor(this); + this.superQueryMixin = (QueryMixin) fieldAccessor.getPropertyValue("queryMixin"); + } + + /** + * Returns the {@literal Mongo Shell} representation of the query.
+ * The following query + * + *
+	 *
+	 * where(p.lastname.eq("Matthews")).orderBy(p.firstname.asc()).offset(1).limit(5);
+	 * 
+ * + * results in + * + *
+	 *
+	 * find({"lastname" : "Matthews"}).sort({"firstname" : 1}).skip(1).limit(5)
+	 * 
+ * + * Note that encoding to {@link String} may fail when using data types that cannot be encoded or DBRef's without an + * identifier. + * + * @return never {@literal null}. + */ + @Override + public String toString() { + + Document projection = createProjection(getQueryMixin().getMetadata().getProjection()); + Document sort = createSort(getQueryMixin().getMetadata().getOrderBy()); + DocumentCodec codec = new DocumentCodec(MongoClientSettings.getDefaultCodecRegistry()); + + StringBuilder sb = new StringBuilder("find(" + asDocument().toJson(JSON_WRITER_SETTINGS, codec)); + if (projection != null && projection.isEmpty()) { + sb.append(", ").append(projection.toJson(JSON_WRITER_SETTINGS, codec)); + } + sb.append(")"); + if (!sort.isEmpty()) { + sb.append(".sort(").append(sort.toJson(JSON_WRITER_SETTINGS, codec)).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getOffset() != null) { + sb.append(".skip(").append(getQueryMixin().getMetadata().getModifiers().getOffset()).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getLimit() != null) { + sb.append(".limit(").append(getQueryMixin().getMetadata().getModifiers().getLimit()).append(")"); + } + return sb.toString(); + } + + /** + * Get the where definition as a Document instance + * + * @return + */ + public Document asDocument() { + return createQuery(getQueryMixin().getMetadata().getWhere()); + } + + /** + * Obtain the {@literal Mongo Shell} json query representation. + * + * @return never {@literal null}. + */ + public String toJson() { + return toJson(JSON_WRITER_SETTINGS); + } + + /** + * Obtain the json query representation applying given {@link JsonWriterSettings settings}. + * + * @param settings must not be {@literal null}. + * @return never {@literal null}. + */ + public String toJson(JsonWriterSettings settings) { + return asDocument().toJson(settings); + } + + /** + * Compute the sort {@link Document} from the given list of {@link OrderSpecifier order specifiers}. + * + * @param orderSpecifiers can be {@literal null}. + * @return an empty {@link Document} if predicate is {@literal null}. 
+ * @see MongodbDocumentSerializer#toSort(List) + */ + protected Document createSort(List> orderSpecifiers) { + return serializer.toSort(orderSpecifiers); + } + + // TODO: Remove once https://github.com/querydsl/querydsl/pull/2916 is merged + QueryMixin getQueryMixin() { + return superQueryMixin; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java index e18f30d96a..2453e1a46c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java @@ -22,6 +22,7 @@ import java.util.regex.Pattern; import org.bson.Document; + import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.QueryMapper; @@ -39,6 +40,7 @@ import com.querydsl.core.types.PathMetadata; import com.querydsl.core.types.PathType; import com.querydsl.mongodb.MongodbSerializer; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints. diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java index 5584ae6e3b..0067eb3bf1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java @@ -28,8 +28,6 @@ import java.util.Arrays; -import javax.annotation.Nullable; - import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -49,6 +47,7 @@ import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.lang.Nullable; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.util.ClassUtils; From d57c5a952984e39fda28d2807e7ac41ae2eb162f Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 15 Jun 2021 16:51:28 +0200 Subject: [PATCH 012/885] Add support for Wildcard Index. Add WildcardIndexed annotation and the programatic WildcardIndex. Closes #3225 Original pull request: #3671. 
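For illustration only, a minimal sketch of the two declaration styles this commit enables; the entity, field, and index names below are made up, and applying the programmatic definition through IndexOperations#ensureIndex is one possible way to use it:

    @Document
    @WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
    class User {
        @Id String id;
        UserMetadata userMetadata;
    }
    // creates { "$**" : 1 } with wildcardProjection { "userMetadata.age" : 0 }

    // programmatic equivalent using the new WildcardIndex type
    WildcardIndex index = new WildcardIndex()
            .named("user-wildcard-idx")
            .wildcardProjectionExclude("userMetadata.age");
    template.indexOps(User.class).ensureIndex(index);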
--- .../data/mongodb/core/IndexConverters.java | 4 + .../data/mongodb/core/index/IndexField.java | 25 ++- .../data/mongodb/core/index/IndexInfo.java | 26 +++ .../MongoPersistentEntityIndexResolver.java | 106 ++++++++-- .../mongodb/core/index/WildcardIndex.java | 198 ++++++++++++++++++ .../mongodb/core/index/WildcardIndexed.java | 130 ++++++++++++ .../core/index/IndexInfoUnitTests.java | 11 + ...ersistentEntityIndexResolverUnitTests.java | 85 +++++++- src/main/asciidoc/reference/mapping.adoc | 88 ++++++++ 9 files changed, 654 insertions(+), 19 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java index e7fae4df5c..4d5349f7e7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -115,6 +115,10 @@ private static Converter getIndexDefinitionIndexO ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); } + if(indexOptions.containsKey("wildcardProjection")) { + ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); + } + return ops; }; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java index d0a1da68ea..7883da2270 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java @@ -29,7 +29,7 @@ public final class IndexField { enum Type { - GEO, TEXT, DEFAULT, HASH; + GEO, TEXT, DEFAULT, HASH, WILDCARD; } private final String key; @@ -48,7 +48,7 @@ private IndexField(String key, @Nullable Direction direction, @Nullable Type typ if (Type.GEO.equals(type) || Type.TEXT.equals(type)) { Assert.isNull(direction, "Geo/Text indexes must not have a direction!"); } else { - if (!Type.HASH.equals(type)) { + if (!(Type.HASH.equals(type) || Type.WILDCARD.equals(type))) { Assert.notNull(direction, "Default indexes require a direction"); } } @@ -77,6 +77,17 @@ static IndexField hashed(String key) { return new IndexField(key, null, Type.HASH); } + /** + * Creates a {@literal wildcard} {@link IndexField} for the given key. + * + * @param key must not be {@literal null} or empty. + * @return new instance of {@link IndexField}. + * @since 3.3 + */ + static IndexField wildcard(String key) { + return new IndexField(key, null, Type.WILDCARD); + } + /** * Creates a geo {@link IndexField} for the given key. * @@ -142,6 +153,16 @@ public boolean isHashed() { return Type.HASH.equals(type); } + /** + * Returns whether the {@link IndexField} is contains a {@literal wildcard} expression. + * + * @return {@literal true} if {@link IndexField} contains a wildcard {@literal $**}. 
+ * @since 3.3 + */ + public boolean isWildcard() { + return Type.WILDCARD.equals(type); + } + /* * (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java index 7b507a8727..f8370b1bc6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java @@ -55,6 +55,7 @@ public class IndexInfo { private @Nullable Duration expireAfter; private @Nullable String partialFilterExpression; private @Nullable Document collation; + private @Nullable Document wildcardProjection; public IndexInfo(List indexFields, String name, boolean unique, boolean sparse, String language) { @@ -99,6 +100,8 @@ public static IndexInfo indexInfoOf(Document sourceDocument) { if (ObjectUtils.nullSafeEquals("hashed", value)) { indexFields.add(IndexField.hashed(key)); + } else if (key.contains("$**")) { + indexFields.add(IndexField.wildcard(key)); } else { Double keyValue = new Double(value.toString()); @@ -131,6 +134,10 @@ public static IndexInfo indexInfoOf(Document sourceDocument) { info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class)); } + if (sourceDocument.containsKey("wildcardProjection")) { + info.wildcardProjection = sourceDocument.get("wildcardProjection", Document.class); + } + return info; } @@ -216,6 +223,16 @@ public Optional getCollation() { return Optional.ofNullable(collation); } + /** + * Get {@literal wildcardProjection} information. + * + * @return {@link Optional#empty() empty} if not set. + * @since 3.3 + */ + public Optional getWildcardProjection() { + return Optional.ofNullable(wildcardProjection); + } + /** * Get the duration after which documents within the index expire. * @@ -234,6 +251,14 @@ public boolean isHashed() { return getIndexFields().stream().anyMatch(IndexField::isHashed); } + /** + * @return {@literal true} if a wildcard index field is present. 
+ * @since 3.3 + */ + public boolean isWildcard() { + return getIndexFields().stream().anyMatch(IndexField::isWildcard); + } + @Override public String toString() { @@ -303,4 +328,5 @@ public boolean equals(Object obj) { } return true; } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java index 2fc63fb36c..78f895e077 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java @@ -46,6 +46,7 @@ import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.mongodb.util.DotPath; import org.springframework.data.spel.EvaluationContextProvider; @@ -121,6 +122,7 @@ public List resolveIndexForEntity(MongoPersistentEntity indexInformation = new ArrayList<>(); String collection = root.getCollection(); indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions("", collection, root)); indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection)); root.doWithProperties((PropertyHandler) property -> this @@ -162,17 +164,18 @@ private void potentiallyAddIndexForProperty(MongoPersistentEntity root, Mongo * @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property * types. Will never be {@code null}. 
*/ - private List resolveIndexForClass( TypeInformation type, String dotPath, - Path path, String collection, CycleGuard guard) { + private List resolveIndexForClass(TypeInformation type, String dotPath, Path path, + String collection, CycleGuard guard) { return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(type), dotPath, path, collection, guard); } - private List resolveIndexForEntity(MongoPersistentEntity entity, String dotPath, - Path path, String collection, CycleGuard guard) { + private List resolveIndexForEntity(MongoPersistentEntity entity, String dotPath, Path path, + String collection, CycleGuard guard) { List indexInformation = new ArrayList<>(); indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions(dotPath, collection, entity)); entity.doWithProperties((PropertyHandler) property -> this .guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard)); @@ -196,15 +199,15 @@ private void guardAndPotentiallyAddIndexForProperty(MongoPersistentProperty pers if (persistentProperty.isEntity()) { try { - indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), propertyDotPath.toString(), - propertyPath, collection, guard)); + indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), + propertyDotPath.toString(), propertyPath, collection, guard)); } catch (CyclicPropertyReferenceException e) { LOGGER.info(e.getMessage()); } } - List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), collection, - persistentProperty); + List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, persistentProperty); if (!indexDefinitions.isEmpty()) { indexes.addAll(indexDefinitions); @@ -232,6 +235,11 @@ private List createIndexDefinitionHolderForProperty(Strin if (persistentProperty.isAnnotationPresent(HashIndexed.class)) { indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty)); } + if (persistentProperty.isAnnotationPresent(WildcardIndexed.class)) { + indices.add(createWildcardIndexDefinition(dotPath, collection, + persistentProperty.getRequiredAnnotation(WildcardIndexed.class), + mappingContext.getPersistentEntity(persistentProperty))); + } return indices; } @@ -246,6 +254,18 @@ private List potentiallyCreateCompoundIndexDefinitions(St return createCompoundIndexDefinitions(dotPath, collection, entity); } + private List potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection, + MongoPersistentEntity entity) { + + if (entity.findAnnotation(WildcardIndexed.class) == null) { + return Collections.emptyList(); + } + + return Collections.singletonList(new IndexDefinitionHolder(dotPath, + createWildcardIndexDefinition(dotPath, collection, entity.getRequiredAnnotation(WildcardIndexed.class), entity), + collection)); + } + private Collection potentiallyCreateTextIndexDefinition( MongoPersistentEntity root, String collection) { @@ -292,9 +312,8 @@ private Collection potentiallyCreateTextIndexDe } - private void appendTextIndexInformation(DotPath dotPath, Path path, - TextIndexDefinitionBuilder indexDefinitionBuilder, MongoPersistentEntity entity, - TextIndexIncludeOptions includeOptions, CycleGuard guard) { + private void appendTextIndexInformation(DotPath dotPath, Path path, TextIndexDefinitionBuilder indexDefinitionBuilder, + MongoPersistentEntity 
entity, TextIndexIncludeOptions includeOptions, CycleGuard guard) { entity.doWithProperties(new PropertyHandler() { @@ -311,8 +330,7 @@ public void doWithPersistentProperty(MongoPersistentProperty persistentProperty) if (includeOptions.isForce() || indexed != null || persistentProperty.isEntity()) { - DotPath propertyDotPath = dotPath - .append(persistentProperty.getFieldName()); + DotPath propertyDotPath = dotPath.append(persistentProperty.getFieldName()); Path propertyPath = path.append(persistentProperty); @@ -406,6 +424,32 @@ protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, St return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } + protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, String collection, + WildcardIndexed index, @Nullable MongoPersistentEntity entity) { + + WildcardIndex indexDefinition = new WildcardIndex(dotPath); + + if (StringUtils.hasText(index.wildcardProjection())) { + indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity)); + } + + if (!index.useGeneratedName()) { + indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null)); + } + + if (StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity)); + } + + if (StringUtils.hasText(index.collation())) { + indexDefinition.collation(evaluateCollation(index.collation(), entity)); + } else if (entity != null && entity.hasCollation()) { + indexDefinition.collation(entity.getCollation()); + } + + return new IndexDefinitionHolder(dotPath, indexDefinition, collection); + } + private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString, PersistentEntity entity) { @@ -510,6 +554,33 @@ private PartialIndexFilter evaluatePartialFilter(String filterExpression, Persis return PartialIndexFilter.of(BsonUtils.parse(filterExpression, null)); } + private org.bson.Document evaluateWildcardProjection(String projectionExpression, PersistentEntity entity) { + + Object result = evaluate(projectionExpression, getEvaluationContextForProperty(entity)); + + if (result instanceof org.bson.Document) { + return (org.bson.Document) result; + } + + return BsonUtils.parse(projectionExpression, null); + } + + private Collation evaluateCollation(String collationExpression, PersistentEntity entity) { + + Object result = evaluate(collationExpression, getEvaluationContextForProperty(entity)); + if (result instanceof org.bson.Document) { + return Collation.from((org.bson.Document) result); + } + if (result instanceof Collation) { + return (Collation) result; + } + if (result instanceof String) { + return Collation.parse(result.toString()); + } + throw new IllegalStateException("Cannot parse collation " + result); + + } + /** * Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given * {@link MongoPersistentProperty}. 
@@ -657,8 +728,8 @@ private void resolveAndAddIndexesForAssociation(Association indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), collection, - property); + List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, property); if (!indexDefinitions.isEmpty()) { indexes.addAll(indexDefinitions); @@ -998,6 +1069,11 @@ public org.bson.Document getIndexKeys() { public org.bson.Document getIndexOptions() { return indexDefinition.getIndexOptions(); } + + @Override + public String toString() { + return "IndexDefinitionHolder{" + "indexKeys=" + getIndexKeys() + '}'; + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java new file mode 100644 index 0000000000..ab1cda6183 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java @@ -0,0 +1,198 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.time.Duration; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * {@link WildcardIndex} is a specific {@link Index} that can be used to include all fields into an index based on the + * {@code $**" : 1} pattern on a root object (the one typically carrying the + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation). On those it is possible to use + * {@link #wildcardProjectionInclude(String...)} and {@link #wildcardProjectionExclude(String...)} to define specific + * paths for in-/exclusion. + *

+ * It can also be used to define an index on a specific field path and its subfields, e.g. + * {@code "path.to.field.$**" : 1}.
+ * Note that {@literal wildcardProjections} are not allowed in this case. + *

+ * LIMITATIONS:
+ * - {@link #unique() Unique} and {@link #expire(long) ttl} options are not supported.
+ * - Keys used for sharding must not be included.
+ * - Cannot be used to generate any type of geo index.
+ * + * @author Christoph Strobl + * @see MongoDB Reference Documentation: Wildcard + * Indexes/ + * @since 3.3 + */ +public class WildcardIndex extends Index { + + private @Nullable String fieldName; + private Map wildcardProjection = new LinkedHashMap<>(); + + /** + * Create a new instance of {@link WildcardIndex} using {@code $**}. + */ + public WildcardIndex() {} + + /** + * Create a new instance of {@link WildcardIndex} for the given {@literal path}. If no {@literal path} is provided the + * index will be considered a root one using {@code $**}.
+ * NOTE {@link #wildcardProjectionInclude(String...)}, {@link #wildcardProjectionExclude(String...)} + * can only be used for top level index definitions having an {@literal empty} or {@literal null} path. + * + * @param path can be {@literal null}. If {@literal null} all fields will be indexed. + */ + public WildcardIndex(@Nullable String path) { + this.fieldName = path; + } + + /** + * Include the {@code _id} field in {@literal wildcardProjection}. + * + * @return this. + */ + public WildcardIndex includeId() { + + wildcardProjection.put("_id", 1); + return this; + } + + /** + * Set the index name to use. + * + * @param name + * @return this. + */ + @Override + public WildcardIndex named(String name) { + + super.named(name); + return this; + } + + /** + * Unique option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index unique() { + throw new UnsupportedOperationException("Wildcard Index does not support 'unique'."); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index expire(long seconds) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'."); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index expire(long value, TimeUnit timeUnit) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'."); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index expire(Duration duration) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'."); + } + + /** + * Add fields to be included from indexing via {@code wildcardProjection}.
+ * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionInclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 1); + } + return this; + } + + /** + * Add fields to be excluded from indexing via {@code wildcardProjection}.
+ * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionExclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 0); + } + return this; + } + + /** + * Set the fields to be in-/excluded from indexing via {@code wildcardProjection}.
+ * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param includeExclude must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjection(Map includeExclude) { + + wildcardProjection.putAll(includeExclude); + return this; + } + + private String getTargetFieldName() { + return StringUtils.hasText(fieldName) ? (fieldName + ".$**") : "$**"; + } + + @Override + public Document getIndexKeys() { + return new Document(getTargetFieldName(), 1); + } + + @Override + public Document getIndexOptions() { + + Document options = new Document(super.getIndexOptions()); + if (!CollectionUtils.isEmpty(wildcardProjection)) { + options.put("wildcardProjection", new Document(wildcardProjection)); + } + return options; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java new file mode 100644 index 0000000000..5f32aaf45c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java @@ -0,0 +1,130 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation for an entity or property that should be used as key for a + * Wildcard Index.
+ * If placed on a {@link ElementType#TYPE type} that is a root level domain entity (one having an + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation), it will advise the index creator to create a + * wildcard index for it. + * + *
+ *
+ * @Document
+ * @WildcardIndexed
+ * public class Product {
+ *     ...
+ * }
+ *
+ * db.product.createIndex({ "$**" : 1 } , {})
+ * 
+ * + * {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index. + * + *
+ *
+ * @Document
+ * @WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
+ * public class User {
+ *     private @Id String id;
+ *     private UserMetadata userMetadata;
+ * }
+ *
+ *
+ * db.user.createIndex(
+ *   { "$**" : 1 },
+ *   { "wildcardProjection" :
+ *     { "userMetadata.age" : 0 }
+ *   }
+ * )
+ * 
+ * + * Wildcard indexes can also be expressed by adding the annotation directly to the field. Please note that + * {@literal wildcardProjection} is not allowed on nested paths. + * + *
+ * @Document
+ * public class User {
+ * 
+ *     private @Id String id;
+ *
+ *     @WildcardIndexed
+ *     private UserMetadata userMetadata;
+ * }
+ *
+ *
+ * db.user.createIndex({ "userMetadata.$**" : 1 }, {})
+ * 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +@Documented +@Target({ ElementType.TYPE, ElementType.FIELD }) +@Retention(RetentionPolicy.RUNTIME) +public @interface WildcardIndexed { + + /** + * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template + * expression}.
+ *
+ * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the + * provided name will be prefixed with the path leading to the entity.
+ * + * @return + */ + String name() default ""; + + /** + * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults + * to {@literal false}. + * + * @return {@literal false} by default. + */ + boolean useGeneratedName() default false; + + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
+ * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + */ + String partialFilter() default ""; + + /** + * Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) parsable} String. + *
+ * NOTE: Can only be done on root level documents. + * + * @return empty by default. + */ + String wildcardProjection() default ""; + + /** + * Defines the collation to apply. + * + * @return an empty {@link String} by default. + */ + String collation() default ""; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java index 2026dfc644..3618e4c1f9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java @@ -36,6 +36,7 @@ public class IndexInfoUnitTests { static final String INDEX_WITH_PARTIAL_FILTER = "{ \"v\" : 2, \"key\" : { \"k3y\" : 1 }, \"name\" : \"partial-filter-index\", \"ns\" : \"db.collection\", \"partialFilterExpression\" : { \"quantity\" : { \"$gte\" : 10 } } }"; static final String INDEX_WITH_EXPIRATION_TIME = "{ \"v\" : 2, \"key\" : { \"lastModifiedDate\" : 1 },\"name\" : \"expire-after-last-modified\", \"ns\" : \"db.collectio\", \"expireAfterSeconds\" : 3600 }"; static final String HASHED_INDEX = "{ \"v\" : 2, \"key\" : { \"score\" : \"hashed\" }, \"name\" : \"score_hashed\", \"ns\" : \"db.collection\" }"; + static final String WILDCARD_INDEX = "{ \"v\" : 2, \"key\" : { \"$**\" : 1 }, \"name\" : \"$**_1\", \"wildcardProjection\" : { \"fieldA\" : 0, \"fieldB.fieldC\" : 0 } }"; @Test public void isIndexForFieldsCorrectly() { @@ -79,6 +80,16 @@ public void hashedIndexIsMarkedAsSuch() { assertThat(getIndexInfo(HASHED_INDEX).isHashed()).isTrue(); } + @Test // GH-3225 + public void identifiesWildcardIndexCorrectly() { + assertThat(getIndexInfo(WILDCARD_INDEX).isWildcard()).isTrue(); + } + + @Test // GH-3225 + public void readsWildcardIndexProjectionCorrectly() { + assertThat(getIndexInfo(WILDCARD_INDEX).getWildcardProjection()).contains(new Document("fieldA", 0).append("fieldB.fieldC", 0)); + } + private static IndexInfo getIndexInfo(String documentJson) { return IndexInfo.indexInfoOf(Document.parse(documentJson)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java index 489070548d..0a06561b67 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java @@ -15,8 +15,9 @@ */ package org.springframework.data.mongodb.core.index; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.test.util.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThatExceptionOfType; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -25,6 +26,7 @@ import java.util.Arrays; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import org.junit.Test; import org.junit.runner.RunWith; @@ -1323,6 +1325,49 @@ public void errorsOnIndexOnEmbedded() { } + @Test // GH-3225 + public void resolvesWildcardOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + 
WithWildCardIndexOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("$**", 1); + }); + } + + @Test // GH-3225 + public void resolvesWildcardOnProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexOnProperty.class); + assertThat(indices).hasSize(3); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + }); + assertThat(indices.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("the_field.$**", 1); + }); + assertThat(indices.get(2)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withOptions.$**", 1); + assertThat(it.getIndexOptions()).containsEntry("name", + "withOptions.idx") + .containsEntry("collation", new org.bson.Document("locale", "en_US")) + .containsEntry("partialFilterExpression", new org.bson.Document("$eq", 1)); + }); + } + + @Test // GH-3225 + public void resolvesWildcardTypeOfNestedProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardOnEntityOfNested.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + }); + } + @Document class MixedIndexRoot { @@ -1533,7 +1578,7 @@ class InvalidIndexOnUnwrapped { @Indexed // @Unwrapped.Nullable // - UnwrappableType unwrappableType; + UnwrappableType unwrappableType; } @@ -1573,6 +1618,42 @@ class WithHashedIndex { @HashIndexed String value; } + @Document + @WildcardIndexed + class WithWildCardIndexOnEntity { + + String value; + } + + @Document + @WildcardIndexed(wildcardProjection = "{'_id' : 1, 'value' : 0}") + class WithWildCardIndexHavingProjectionOnEntity { + + String value; + } + + @Document + class WithWildCardIndexOnProperty { + + @WildcardIndexed // + Map value; + + @WildcardIndexed // + @Field("the_field") // + Map renamedField; + + @WildcardIndexed(name = "idx", partialFilter = "{ '$eq' : 1 }", collation = "en_US") // + Map withOptions; + + } + + @Document + class WithWildCardOnEntityOfNested { + + WithWildCardIndexOnEntity value; + + } + @Document class WithHashedIndexAndIndex { diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index f08d03d3f0..7caf1093b9 100644 --- a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -760,6 +760,94 @@ mongoOperations.indexOpsFor(Jedi.class) ---- ==== +[[mapping-usage-indexes.wildcard-index]] +=== Wildcard Indexes + +A `WildcardIndex` is an index that can be used to include all fields or specific ones based a given (wildcard) pattern. +For details, refer to the https://docs.mongodb.com/manual/core/index-wildcard/[MongoDB Documentation]. + +The index can be set up programmatically using `WildcardIndex` via `IndexOperations`. + +.Programmatic WildcardIndex setup +==== +[source,java] +---- +mongoOperations + .indexOps(User.class) + .ensureIndex(new WildcardIndex("userMetadata")); +---- +[source,javascript] +---- +db.user.createIndex({ "userMetadata.$**" : 1 }, {}) +---- +==== + +The `@WildcardIndex` annotation allows a declarative index setup an can be added on either a type or property. + +If placed on a type that is a root level domain entity (one having an `@Document` annotation) will advise the index creator to create a +wildcard index for it. 
+ +.Wildcard index on domain type +==== +[source,java] +---- +@Document +@WildcardIndexed +public class Product { + ... +} +---- +[source,javascript] +---- +db.product.createIndex({ "$**" : 1 },{}) +---- +==== + +The `wildcardProjection` can be used to specify keys to in-/exclude in the index. + +.Wildcard index with `wildcardProjection` +==== +[source,java] +---- +@Document +@WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }") +public class User { + private @Id String id; + private UserMetadata userMetadata; +} +---- +[source,javascript] +---- +db.user.createIndex( + { "$**" : 1 }, + { "wildcardProjection" : + { "userMetadata.age" : 0 } + } +) +---- +==== + +Wildcard indexes can also be expressed by adding the annotation directly to the field. +Please note that `wildcardProjection` is not allowed on nested paths. + +.Wildcard index on property +==== +[source,java] +---- +@Document +public class User { + private @Id String id; + + @WildcardIndexed + private UserMetadata userMetadata; +} +---- +[source,javascript] +---- +db.user.createIndex({ "userMetadata.$**" : 1 }, {}) +---- +==== + [[mapping-usage-indexes.text-index]] === Text Indexes From f3b90c2b8abf760d503115f4569b5e57bd6beb8e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 14 Jul 2021 15:03:39 +0200 Subject: [PATCH 013/885] Polishing. Reformat code. Tweak javadoc. Reject wildcard projection usage on properties with a MappingException. Omit wildcard projections when declared on document types that are used as subdocument. See #3225 Original pull request: #3671. --- .../data/mongodb/core/IndexConverters.java | 2 +- .../data/mongodb/core/index/IndexField.java | 15 +++++++-- .../data/mongodb/core/index/IndexInfo.java | 2 +- .../MongoPersistentEntityIndexResolver.java | 24 ++++++++++++-- .../mongodb/core/index/WildcardIndex.java | 10 +++--- .../mongodb/core/index/WildcardIndexed.java | 14 ++++---- ...ersistentEntityIndexResolverUnitTests.java | 33 ++++++++++++++++++- src/main/asciidoc/new-features.adoc | 1 + src/main/asciidoc/reference/mapping.adoc | 9 ++--- 9 files changed, 87 insertions(+), 23 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java index 4d5349f7e7..db1fa0bf80 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -115,7 +115,7 @@ private static Converter getIndexDefinitionIndexO ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); } - if(indexOptions.containsKey("wildcardProjection")) { + if (indexOptions.containsKey("wildcardProjection")) { ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java index 7883da2270..843584b29d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java @@ -29,7 +29,17 @@ public final class IndexField { enum Type { - GEO, TEXT, DEFAULT, HASH, WILDCARD; + GEO, TEXT, DEFAULT, + + /** + * @since 2.2 + */ + HASH, + + /** + * @since 3.3 + */ + WILDCARD; } private final 
String key; @@ -78,7 +88,8 @@ static IndexField hashed(String key) { } /** - * Creates a {@literal wildcard} {@link IndexField} for the given key. + * Creates a {@literal wildcard} {@link IndexField} for the given key. The {@code key} must follow the + * {@code fieldName.$**} notation. * * @param key must not be {@literal null} or empty. * @return new instance of {@link IndexField}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java index f8370b1bc6..51b4aa48cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java @@ -100,7 +100,7 @@ public static IndexInfo indexInfoOf(Document sourceDocument) { if (ObjectUtils.nullSafeEquals("hashed", value)) { indexFields.add(IndexField.hashed(key)); - } else if (key.contains("$**")) { + } else if (key.endsWith("$**")) { indexFields.add(IndexField.wildcard(key)); } else { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java index 78f895e077..5fdb1cbc40 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java @@ -119,6 +119,8 @@ public List resolveIndexForEntity(MongoPersistentEntity String .format("Entity %s is not a collection root. Make sure to annotate it with @Document!", root.getName())); + verifyWildcardIndexedProjection(root); + List indexInformation = new ArrayList<>(); String collection = root.getCollection(); indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root)); @@ -133,6 +135,24 @@ public List resolveIndexForEntity(MongoPersistentEntity entity) { + + entity.doWithAll(it -> { + + if (it.isAnnotationPresent(WildcardIndexed.class)) { + + WildcardIndexed indexed = it.getRequiredAnnotation(WildcardIndexed.class); + + if (!ObjectUtils.isEmpty(indexed.wildcardProjection())) { + + throw new MappingException(String.format( + "WildcardIndexed.wildcardProjection cannot be used on nested paths. 
Offending property: %s.%s", + entity.getName(), it.getName())); + } + } + }); + } + private void potentiallyAddIndexForProperty(MongoPersistentEntity root, MongoPersistentProperty persistentProperty, List indexes, CycleGuard guard) { @@ -257,7 +277,7 @@ private List potentiallyCreateCompoundIndexDefinitions(St private List potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection, MongoPersistentEntity entity) { - if (entity.findAnnotation(WildcardIndexed.class) == null) { + if (!entity.isAnnotationPresent(WildcardIndexed.class)) { return Collections.emptyList(); } @@ -429,7 +449,7 @@ protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, St WildcardIndex indexDefinition = new WildcardIndex(dotPath); - if (StringUtils.hasText(index.wildcardProjection())) { + if (StringUtils.hasText(index.wildcardProjection()) && ObjectUtils.isEmpty(dotPath)) { indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java index ab1cda6183..b07c3b1bc9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java @@ -51,7 +51,7 @@ public class WildcardIndex extends Index { private @Nullable String fieldName; - private Map wildcardProjection = new LinkedHashMap<>(); + private final Map wildcardProjection = new LinkedHashMap<>(); /** * Create a new instance of {@link WildcardIndex} using {@code $**}. @@ -97,7 +97,7 @@ public WildcardIndex named(String name) { /** * Unique option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. */ @Override public Index unique() { @@ -107,7 +107,7 @@ public Index unique() { /** * ttl option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. */ @Override public Index expire(long seconds) { @@ -117,7 +117,7 @@ public Index expire(long seconds) { /** * ttl option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. */ @Override public Index expire(long value, TimeUnit timeUnit) { @@ -127,7 +127,7 @@ public Index expire(long value, TimeUnit timeUnit) { /** * ttl option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. */ @Override public Index expire(Duration duration) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java index 5f32aaf45c..d1b18e85bf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java @@ -38,7 +38,7 @@ * * db.product.createIndex({ "$**" : 1 } , {}) * - * + * * {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index. * *
@@ -65,7 +65,7 @@
  * 
  * @Document
  * public class User {
- * 
+ *
  *     private @Id String id;
  *
  *     @WildcardIndexed
@@ -89,9 +89,9 @@
 	 * expression}. 
*
* The name will only be applied as is when defined on root level. For usage on nested or embedded structures the - * provided name will be prefixed with the path leading to the entity.
- * - * @return + * provided name will be prefixed with the path leading to the entity. + * + * @return empty by default. */ String name() default ""; @@ -115,8 +115,8 @@ /** * Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) parsable} String. *
- * NOTE: Can only be done on root level documents. - * + * NOTE: Can only be applied on root level documents. + * * @return empty by default. */ String wildcardProjection() default ""; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java index 0a06561b67..30f6a9bfc5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java @@ -32,10 +32,12 @@ import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.core.annotation.AliasFor; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.annotation.Id; import org.springframework.data.geo.Point; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.CompoundIndexResolutionTests; @@ -1333,6 +1335,20 @@ public void resolvesWildcardOnRoot() { assertThat(indices).hasSize(1); assertThat(indices.get(0)).satisfies(it -> { assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).isEmpty(); + }); + } + + @Test // GH-3225 + public void resolvesWildcardWithProjectionOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexHavingProjectionOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).containsEntry("wildcardProjection", + org.bson.Document.parse("{'_id' : 1, 'value' : 0}")); }); } @@ -1365,6 +1381,15 @@ public void resolvesWildcardTypeOfNestedProperty() { assertThat(indices).hasSize(1); assertThat(indices.get(0)).satisfies(it -> { assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + assertThat(it.getIndexOptions()).hasSize(1).containsKey("name"); + }); + } + + @Test // GH-3225 + public void rejectsWildcardProjectionOnNestedPaths() { + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> { + prepareMappingContextAndResolveIndexForType(WildcardIndexedProjectionOnNestedPath.class); }); } @@ -1647,10 +1672,16 @@ class WithWildCardIndexOnProperty { } + @Document + class WildcardIndexedProjectionOnNestedPath { + + @WildcardIndexed(wildcardProjection = "{}") String foo; + } + @Document class WithWildCardOnEntityOfNested { - WithWildCardIndexOnEntity value; + WithWildCardIndexHavingProjectionOnEntity value; } diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index a74594bff0..74458b9971 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -6,6 +6,7 @@ * Extended support for <> entities. * Include/exclude `null` properties on write to `Document` through `@Field(write=…)`. +* Support for <>. 
[[new-features.3.2]] == What's New in Spring Data MongoDB 3.2 diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index 7caf1093b9..e301826697 100644 --- a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -782,9 +782,9 @@ db.user.createIndex({ "userMetadata.$**" : 1 }, {}) ---- ==== -The `@WildcardIndex` annotation allows a declarative index setup an can be added on either a type or property. +The `@WildcardIndex` annotation allows a declarative index setup that can used either with a document type or property. -If placed on a type that is a root level domain entity (one having an `@Document` annotation) will advise the index creator to create a +If placed on a type that is a root level domain entity (one annotated with `@Document`) , the index resolver will create a wildcard index for it. .Wildcard index on domain type @@ -794,7 +794,7 @@ wildcard index for it. @Document @WildcardIndexed public class Product { - ... + // … } ---- [source,javascript] @@ -828,7 +828,8 @@ db.user.createIndex( ==== Wildcard indexes can also be expressed by adding the annotation directly to the field. -Please note that `wildcardProjection` is not allowed on nested paths. +Please note that `wildcardProjection` is not allowed on nested paths such as properties. +Projections on types annotated with `@WildcardIndexed` are omitted during index creation. .Wildcard index on property ==== From 23177fef0c0fe163e44d42543f17a621969d6e5f Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 12 Jul 2021 10:32:13 +0200 Subject: [PATCH 014/885] Custom Converter should also be applicable for simple types. This commit fixes a regression that prevented custom converters from being applied to types considered store native ones. Original pull request: #3703. 
Fixes #3670 --- .../core/convert/MappingMongoConverter.java | 6 ++++- .../MappingMongoConverterUnitTests.java | 25 +++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index aced009cda..83be993b93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -1173,7 +1173,7 @@ protected Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation @SuppressWarnings({ "rawtypes", "unchecked" }) private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class target) { - if (target == null || ClassUtils.isAssignableValue(target, value)) { + if (target == null) { return value; } @@ -1181,6 +1181,10 @@ private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class) target, value.toString()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 369f6dbdef..759be5c6b3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -30,6 +30,7 @@ import java.time.temporal.ChronoUnit; import java.util.*; +import org.bson.types.Binary; import org.bson.types.Code; import org.bson.types.Decimal128; import org.bson.types.ObjectId; @@ -2568,6 +2569,21 @@ void readsMapContainingNullValue() { .containsEntry("item3", "i3"); } + @Test // GH-3670 + void appliesCustomConverterEvenToSimpleTypes() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new MongoSimpleTypeConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("content", new Binary(new byte[] {0x00, 0x42})); + + GenericType target = converter.read(GenericType.class, source); + assertThat(target.content).isInstanceOf(byte[].class); + } + static class GenericType { T content; } @@ -3136,6 +3152,15 @@ public TypeImplementingMap convert(org.bson.Document source) { } } + @ReadingConverter + public static class MongoSimpleTypeConverter implements Converter { + + @Override + public byte[] convert(Binary source) { + return source.getData(); + } + } + static class TypeWrappingTypeImplementingMap { String id; From 3f27e8e152768d0ecff078190720ff59acdb7a70 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 12 Jul 2021 07:56:59 +0200 Subject: [PATCH 015/885] Fix raw document conversion in Collection like properties. Along the lines make sure to convert map like structures correctly if they do not come as a Document, eg. cause they got converted to a plain Map in a post load, pre convert event. Closes #3702 Original pull request: #3704. 
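For illustration only (not part of this patch), a minimal sketch of the scenario being addressed, mirroring the new readsMapThatDoesNotComeAsDocument test; ClassWithMapProperty and its mapOfObjects property are existing fixtures in MappingMongoConverterUnitTests:

    // The nested value arrives as a plain java.util.Map instead of an org.bson.Document,
    // e.g. because an event listener replaced it after load but before conversion.
    org.bson.Document source = new org.bson.Document("_id", "id-1")
        .append("mapOfObjects", java.util.Collections.singletonMap("simple", 1));

    // Previously the unconditional (Bson) cast failed for such a plain Map; with this
    // change the Map is wrapped in a Document before map conversion, so reading succeeds.
    ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source);
    assertThat(target.mapOfObjects).containsEntry("simple", 1);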
--- .../core/convert/MappingMongoConverter.java | 14 ++++++- .../MappingMongoConverterUnitTests.java | 39 +++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 83be993b93..9cb1f89797 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -2040,7 +2040,19 @@ public S convert(Object source, TypeInformation } if (typeHint.isMap()) { - return (S) mapConverter.convert(this, (Bson) source, typeHint); + + if(ClassUtils.isAssignable(Document.class, typeHint.getType())) { + return (S) documentConverter.convert(this, (Bson) source, typeHint); + } + + if(source instanceof Bson) { + return (S) mapConverter.convert(this, (Bson) source, typeHint); + } + if(source instanceof Map) { + return (S) mapConverter.convert(this, new Document((Map) source), typeHint); + } + + throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass())); } if (source instanceof DBRef) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 759be5c6b3..dbc9e3d752 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -2584,6 +2584,38 @@ void appliesCustomConverterEvenToSimpleTypes() { assertThat(target.content).isInstanceOf(byte[].class); } + @Test // GH-3702 + void readsRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("raw", new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.raw).isInstanceOf(org.bson.Document.class).isEqualTo( new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3702 + void readsListOfRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("listOfRaw", Arrays.asList(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1)))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.listOfRaw) + .containsExactly(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3692 + void readsMapThatDoesNotComeAsDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("mapOfObjects", Collections.singletonMap("simple", 1)); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfObjects).containsEntry("simple",1); + + } + static class GenericType { T content; } @@ -3243,6 +3275,13 @@ public Set> entrySet() { } } + static class WithRawDocumentProperties { + + String id; + org.bson.Document raw; + List listOfRaw; + } + static 
class WithFieldWrite { @org.springframework.data.mongodb.core.mapping.Field( From f38f6d67ab54b29454203946d55e2004dc26504d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 15 Jul 2021 09:59:46 +0200 Subject: [PATCH 016/885] Polishing. Support DBObject and Map that as source for entity materialization and map conversion. See #3702 Original pull request: #3704. --- .../core/convert/MappingMongoConverter.java | 13 +++--- .../data/mongodb/util/BsonUtils.java | 43 +++++++++++++++++++ .../MappingMongoConverterUnitTests.java | 5 ++- .../data/mongodb/util/json/BsonUtilsTest.java | 17 +++++++- 4 files changed, 67 insertions(+), 11 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 9cb1f89797..48505559c0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -2042,14 +2042,11 @@ public S convert(Object source, TypeInformation if (typeHint.isMap()) { if(ClassUtils.isAssignable(Document.class, typeHint.getType())) { - return (S) documentConverter.convert(this, (Bson) source, typeHint); + return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint); } - if(source instanceof Bson) { - return (S) mapConverter.convert(this, (Bson) source, typeHint); - } - if(source instanceof Map) { - return (S) mapConverter.convert(this, new Document((Map) source), typeHint); + if (BsonUtils.supportsBson(source)) { + return (S) mapConverter.convert(this, BsonUtils.asBson(source), typeHint); } throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass())); @@ -2064,8 +2061,8 @@ public S convert(Object source, TypeInformation String.format(INCOMPATIBLE_TYPES, source, BasicDBList.class, typeHint.getType(), getPath())); } - if (source instanceof Bson) { - return (S) documentConverter.convert(this, (Bson) source, typeHint); + if (BsonUtils.supportsBson(source)) { + return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint); } return (S) elementConverter.convert(source, typeHint); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index 4d51af7dee..d452ad662f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -494,6 +494,49 @@ private static Map getAsMap(Object source) { return null; } + /** + * Returns the given source object as {@link Bson}, i.e. {@link Document}s and maps as is or throw + * {@link IllegalArgumentException}. + * + * @param source + * @return the converted/casted source object. + * @throws IllegalArgumentException if {@code source} cannot be converted/cast to {@link Bson}. 
+ * @since 3.2.3 + * @see #supportsBson(Object) + */ + @SuppressWarnings("unchecked") + public static Bson asBson(Object source) { + + if (source instanceof Document) { + return (Document) source; + } + + if (source instanceof BasicDBObject) { + return (BasicDBObject) source; + } + + if (source instanceof DBObject) { + return new Document(((DBObject) source).toMap()); + } + + if (source instanceof Map) { + return new Document((Map) source); + } + + throw new IllegalArgumentException(String.format("Cannot convert %s to Bson", source)); + } + + /** + * Returns the given source can be used/converted as {@link Bson}. + * + * @param source + * @return {@literal true} if the given source can be converted to {@link Bson}. + * @since 3.2.3 + */ + public static boolean supportsBson(Object source) { + return source instanceof DBObject || source instanceof Map; + } + /** * Returns given object as {@link Collection}. Will return the {@link Collection} as is if the source is a * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index dbc9e3d752..ca94ac6e3c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -934,10 +934,11 @@ void convertsSetToBasicDBList() { assertThat(readResult.iterator().next()).isInstanceOf(Address.class); } - @Test // DATAMONGO-402 + @Test // DATAMONGO-402, GH-3702 void readsMemberClassCorrectly() { - org.bson.Document document = new org.bson.Document("inner", new org.bson.Document("value", "FOO!")); + org.bson.Document document = new org.bson.Document("inner", + new LinkedHashMap<>(new org.bson.Document("value", "FOO!"))); Outer outer = converter.read(Outer.class, document); assertThat(outer.inner).isNotNull(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java index 8210dd9a6f..166932c237 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.List; +import java.util.Collections; import org.bson.BsonDouble; import org.bson.BsonInt32; @@ -29,10 +29,16 @@ import org.bson.Document; import org.bson.types.ObjectId; import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.util.BsonUtils; +import com.mongodb.BasicDBList; + /** + * Unit tests for {@link BsonUtils}. 
+ * * @author Christoph Strobl + * @author Mark Paluch */ class BsonUtilsTest { @@ -111,4 +117,13 @@ void asCollectionConvertsWrapsNonIterable() { assertThat((Collection)BsonUtils.asCollection(source)).containsExactly(source); } + + @Test // GH-3702 + void supportsBsonShouldReportIfConversionSupported() { + + assertThat(BsonUtils.supportsBson("foo")).isFalse(); + assertThat(BsonUtils.supportsBson(new Document())).isTrue(); + assertThat(BsonUtils.supportsBson(new BasicDBList())).isTrue(); + assertThat(BsonUtils.supportsBson(Collections.emptyMap())).isTrue(); + } } From bacbd7133e6d9e0f2969261110ad16ebfa0ff98a Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 15 Jul 2021 09:52:50 +0200 Subject: [PATCH 017/885] Add support for creating Time Series collection. Introduce time series to CollectionOptions and add dedicated TimeSeries annotation to derive values from. Closes #3731 Original pull request: #3732. --- .../data/mongodb/core/CollectionOptions.java | 144 ++++++++++++++++-- .../data/mongodb/core/EntityOperations.java | 72 +++++++++ .../data/mongodb/core/MongoTemplate.java | 28 +++- .../mongodb/core/ReactiveMongoTemplate.java | 17 ++- .../data/mongodb/core/mapping/TimeSeries.java | 86 +++++++++++ .../core/timeseries/Granularities.java | 45 ++++++ .../mongodb/core/timeseries/Granularity.java | 27 ++++ .../mongodb/core/MongoTemplateUnitTests.java | 54 +++++-- .../core/ReactiveMongoTemplateUnitTests.java | 47 ++++++ src/main/asciidoc/new-features.adoc | 1 + src/main/asciidoc/reference/mongodb.adoc | 1 + src/main/asciidoc/reference/time-series.adoc | 45 ++++++ 12 files changed, 547 insertions(+), 20 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java create mode 100644 src/main/asciidoc/reference/time-series.adoc diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index ca61d18d96..3e509e54f2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -17,8 +17,11 @@ import java.util.Optional; +import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.util.Optionals; import org.springframework.lang.Nullable; @@ -42,6 +45,7 @@ public class CollectionOptions { private @Nullable Boolean capped; private @Nullable Collation collation; private ValidationOptions validationOptions; + private @Nullable TimeSeriesOptions timeSeriesOptions; /** * Constructs a new CollectionOptions instance. 
@@ -54,17 +58,19 @@ public class CollectionOptions { */ @Deprecated public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) { - this(size, maxDocuments, capped, null, ValidationOptions.none()); + this(size, maxDocuments, capped, null, ValidationOptions.none(), null); } private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped, - @Nullable Collation collation, ValidationOptions validationOptions) { + @Nullable Collation collation, ValidationOptions validationOptions, + @Nullable TimeSeriesOptions timeSeriesOptions) { this.maxDocuments = maxDocuments; this.size = size; this.capped = capped; this.collation = collation; this.validationOptions = validationOptions; + this.timeSeriesOptions = timeSeriesOptions; } /** @@ -78,7 +84,7 @@ public static CollectionOptions just(Collation collation) { Assert.notNull(collation, "Collation must not be null!"); - return new CollectionOptions(null, null, null, collation, ValidationOptions.none()); + return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null); } /** @@ -88,7 +94,21 @@ public static CollectionOptions just(Collation collation) { * @since 2.0 */ public static CollectionOptions empty() { - return new CollectionOptions(null, null, null, null, ValidationOptions.none()); + return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null); + } + + /** + * Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use + * {@link #timeSeries(TimeSeriesOptions)}. + * + * @param timeField The name of the property which contains the date in each time series document. Must not be + * {@literal null}. + * @return new instance of {@link CollectionOptions}. 
+ * @see #timeSeries(TimeSeriesOptions) + * @since 3.3 + */ + public static CollectionOptions timeSeries(String timeField) { + return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField)); } /** @@ -99,7 +119,7 @@ public static CollectionOptions empty() { * @since 2.0 */ public CollectionOptions capped() { - return new CollectionOptions(size, maxDocuments, true, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null); } /** @@ -110,7 +130,7 @@ public CollectionOptions capped() { * @since 2.0 */ public CollectionOptions maxDocuments(long maxDocuments) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -121,7 +141,7 @@ public CollectionOptions maxDocuments(long maxDocuments) { * @since 2.0 */ public CollectionOptions size(long size) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -132,7 +152,7 @@ public CollectionOptions size(long size) { * @since 2.0 */ public CollectionOptions collation(@Nullable Collation collation) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -252,7 +272,20 @@ public CollectionOptions schemaValidationAction(ValidationAction validationActio public CollectionOptions validation(ValidationOptions validationOptions) { Assert.notNull(validationOptions, "ValidationOptions must not be null!"); - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}. + * + * @param timeSeriesOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 3.3 + */ + public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) { + + Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -303,6 +336,16 @@ public Optional getValidationOptions() { return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions); } + /** + * Get the {@link TimeSeriesOptions} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 3.3 + */ + public Optional getTimeSeriesOptions() { + return Optional.ofNullable(timeSeriesOptions); + } + /** * Encapsulation of ValidationOptions options. * @@ -398,4 +441,87 @@ boolean isEmpty() { return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel()); } } + + /** + * Options applicable to Time Series collections. 
+ * + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/core/timeseries-collections + */ + public static class TimeSeriesOptions { + + private final String timeField; + + @Nullable // + private String metaField; + + private Granularity granularity; + + private TimeSeriesOptions(String timeField, @Nullable String metaField, Granularity granularity) { + + this.timeField = timeField; + this.metaField = metaField; + this.granularity = granularity; + } + + /** + * Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one, + * that contains the date in each time series document.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param timeField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public static TimeSeriesOptions timeSeries(String timeField) { + return new TimeSeriesOptions(timeField, null, Granularities.DEFAULT); + } + + /** + * Set the name of the field which contains metadata in each time series document. Should not be the {@literal id} + * nor {@link TimeSeriesOptions#timeSeries(String) timeField} nor point to an {@literal array} or + * {@link java.util.Collection}.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param metaField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public TimeSeriesOptions metaField(String metaField) { + return new TimeSeriesOptions(timeField, metaField, granularity); + } + + /** + * Select the {@link Granularity} parameter to define how data in the time series collection is organized. Select + * one that is closest to the time span between incoming measurements. + * + * @return new instance of {@link TimeSeriesOptions}. + */ + public TimeSeriesOptions granularity(Granularity granularity) { + return new TimeSeriesOptions(timeField, metaField, granularity); + } + + /** + * @return never {@literal null}. + */ + public String getTimeField() { + return timeField; + } + + /** + * @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via + * {@link org.springframework.util.StringUtils#hasText(String)}. + */ + @Nullable + public String getMetaField() { + return metaField; + } + + /** + * @return never {@literal null}. + */ + public Granularity getGranularity() { + return granularity; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java index f2daf0287d..9fb8836e1a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -29,19 +29,23 @@ import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * Common operations performed on an entity in the context of it's mapping metadata. @@ -778,6 +782,24 @@ interface TypedOperations { * @return */ Optional getCollation(Query query); + + /** + * Derive the applicable {@link CollectionOptions} for the given type. + * + * @return never {@literal null}. + * @since 3.3 + */ + CollectionOptions getCollectionOptions(); + + /** + * Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially + * annotated field names. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 3.3 + */ + TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options); } /** @@ -817,6 +839,16 @@ public Optional getCollation(Query query) { return query.getCollation(); } + + @Override + public CollectionOptions getCollectionOptions() { + return CollectionOptions.empty(); + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) { + return options; + } } /** @@ -854,6 +886,46 @@ public Optional getCollation(Query query) { return Optional.ofNullable(entity.getCollation()); } + + @Override + public CollectionOptions getCollectionOptions() { + + CollectionOptions collectionOptions = CollectionOptions.empty(); + if (entity.hasCollation()) { + collectionOptions = collectionOptions.collation(entity.getCollation()); + } + + if (entity.isAnnotationPresent(TimeSeries.class)) { + + TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); + if (StringUtils.hasText(timeSeries.metaField())) { + options = options.metaField(timeSeries.metaField()); + } + if (!Granularities.DEFAULT.equals(timeSeries.granularity())) { + options = options.granularity(timeSeries.granularity()); + } + collectionOptions = collectionOptions.timeSeries(options); + } + + return collectionOptions; + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) { + + TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField())); + + if (StringUtils.hasText(source.getMetaField())) { + target = target.metaField(mappedNameOrDefault(source.getMetaField())); + } + return target.granularity(source.getGranularity()); + } + + private String mappedNameOrDefault(String name) { + MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name); + return persistentProperty != null ? 
persistentProperty.getFieldName() : name; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index eae4f42706..c833e511bf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -99,6 +99,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -597,7 +598,7 @@ public void setSessionSynchronization(SessionSynchronization sessionSynchronizat * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class) */ public MongoCollection createCollection(Class entityClass) { - return createCollection(entityClass, CollectionOptions.empty()); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } /* @@ -2435,6 +2436,19 @@ protected MongoCollection doCreateCollection(String collectionName, Do co.validationOptions(options); } + if(collectionOptions.containsKey("timeseries")) { + + Document timeSeries = collectionOptions.get("timeseries", Document.class); + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(timeSeries.getString("timeField")); + if(timeSeries.containsKey("metaField")) { + options.metaField(timeSeries.getString("metaField")); + } + if(timeSeries.containsKey("granularity")) { + options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); + } + co.timeSeriesOptions(options); + } + db.createCollection(collectionName, co); MongoCollection coll = db.getCollection(collectionName, Document.class); @@ -2589,6 +2603,18 @@ protected Document convertToDocument(@Nullable CollectionOptions collectionOptio collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() // .ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType)))); + + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { + + Document timeseries = new Document("timeField", it.getTimeField()); + if(StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if(!Granularities.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); } return doc; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 614894f3b6..2403e9a394 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -17,6 +17,7 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; +import 
org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuple2; @@ -665,7 +666,7 @@ public Mono createMono(String collectionName, ReactiveCollectionCallback< * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) */ public Mono> createCollection(Class entityClass) { - return createCollection(entityClass, CollectionOptions.empty()); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } /* @@ -2505,6 +2506,20 @@ protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Col result.validationOptions(validationOptions); }); + collectionOptions.getTimeSeriesOptions().map(operations.forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> { + + TimeSeriesOptions options = new TimeSeriesOptions(it.getTimeField()); + + if(StringUtils.hasText(it.getMetaField())) { + options.metaField(it.getMetaField()); + } + if(!Granularities.DEFAULT.equals(it.getGranularity())) { + options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); + } + + result.timeSeriesOptions(options); + }); + return result; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java new file mode 100644 index 0000000000..8a5fe255e0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java @@ -0,0 +1,86 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.timeseries.Granularities; + +/** + * Identifies a domain object to be persisted to a MongoDB Time Series collection. + * + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/core/timeseries-collections + */ +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +@Document +public @interface TimeSeries { + + /** + * The collection the document representing the entity is supposed to be stored in. If not configured, a default + * collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically + * calculate the collection based on a per operation basis. + * + * @return the name of the collection to be used. 
+ * @see Document#collection() + */ + @AliasFor(annotation = Document.class, attribute = "collection") + String collection() default ""; + + /** + * The name of the property which contains the date in each time series document.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @return never {@literal null}. + */ + String timeField(); + + /** + * The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor + * {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @return empty {@link String} by default. + */ + String metaField() default ""; + + /** + * Select the {@link Granularities granularity} parameter to define how data in the time series collection is + * organized. + * + * @return {@link Granularities#DEFAULT server default} by default. + */ + Granularities granularity() default Granularities.DEFAULT; + + /** + * Defines the collation to apply when executing a query or creating indexes. + * + * @return an empty {@link String} by default. + * @see Document#collation() + */ + @AliasFor(annotation = Document.class, attribute = "collation") + String collation() default ""; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java new file mode 100644 index 0000000000..f4cac5232c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java @@ -0,0 +1,45 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.timeseries; + +/** + * {@link Granularity Granularities} available for Time Series data. + * + * @author Christoph Strobl + * @since 3.3 + */ +public enum Granularities implements Granularity { + + /** + * Server default value to indicate no explicit value should be sent. + */ + DEFAULT, + + /** + * High frequency ingestion. + */ + SECONDS, + + /** + * Medium frequency ingestion. + */ + MINUTES, + + /** + * Low frequency ingestion. + */ + HOURS +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java new file mode 100644 index 0000000000..c8fe496adb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.timeseries; + +/** + * The Granularity of time series data that is closest to the time span between incoming measurements. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public interface Granularity { + + String name(); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 68c83a2757..cc215c956c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -19,12 +19,14 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.*; +import com.mongodb.client.model.*; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import java.math.BigInteger; import java.time.Duration; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -82,6 +84,7 @@ import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.Sharded; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; @@ -98,6 +101,7 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -117,15 +121,6 @@ import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; -import com.mongodb.client.model.CountOptions; -import com.mongodb.client.model.CreateCollectionOptions; -import com.mongodb.client.model.DeleteOptions; -import com.mongodb.client.model.FindOneAndDeleteOptions; -import com.mongodb.client.model.FindOneAndReplaceOptions; -import com.mongodb.client.model.FindOneAndUpdateOptions; -import com.mongodb.client.model.MapReduceAction; -import com.mongodb.client.model.ReplaceOptions; -import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; @@ -2256,6 +2251,30 @@ void saveErrorsOnCollectionLikeObjects() { .isThrownBy(() -> template.save(new ArrayList<>(Arrays.asList(1, 2, 3)), "myList")); } + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + template.createCollection(TimeSeriesTypeWithDefaults.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + 
assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + } + class AutogenerateableId { @Id BigInteger id; @@ -2358,6 +2377,23 @@ static class WithShardKeyPointingToNested { WithNamedFields nested; } + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularities.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") + Instant timestamp; + Object meta; + } + static class TypeImplementingIterator implements Iterator { @Override diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index 5c5a307f1d..17fde7ec32 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -20,15 +20,21 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.assertThat; +import com.mongodb.client.model.TimeSeriesGranularity; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; +import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesType; +import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesTypeWithDefaults; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; import java.time.Duration; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -1426,6 +1432,30 @@ void insertErrorsOnPublisher() { .isThrownBy(() -> template.insert(publisher)); } + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + template.createCollection(TimeSeriesTypeWithDefaults.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + } + private void stubFindSubscribe(Document document) { Publisher realPublisher = Flux.just(document); @@ -1483,6 +1513,23 @@ static class EntityWithListOfSimple { List grades; } + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { + + String id; + 
Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularities.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") + Instant timestamp; + Object meta; + } + static class ValueCapturingEntityCallback { private final List values = new ArrayList<>(1); diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index 74458b9971..ddfa1e96ec 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -5,6 +5,7 @@ == What's New in Spring Data MongoDB 3.3 * Extended support for <> entities. +* Support for <> collections. * Include/exclude `null` properties on write to `Document` through `@Field(write=…)`. * Support for <>. diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index fb35bb655b..84afc7ea09 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -3382,3 +3382,4 @@ class GridFsClient { include::tailable-cursors.adoc[] include::change-streams.adoc[] +include::time-series.adoc[] diff --git a/src/main/asciidoc/reference/time-series.adoc b/src/main/asciidoc/reference/time-series.adoc new file mode 100644 index 0000000000..ac36e4026e --- /dev/null +++ b/src/main/asciidoc/reference/time-series.adoc @@ -0,0 +1,45 @@ +[[time-series]] +== Time Series + +MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections optimized to efficiently store sequences of measurements. +Those collections need to be actively created before inserting any data. This can be done by manually executing the command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below. + +.Create a Time Series Collection +==== +.Create a Time Series via the MongoDB Driver +[code, java] +---- +template.execute(db -> { + + com.mongodb.client.model.CreateCollectionOptions options = new CreateCollectionOptions(); + options.timeSeriesOptions(new TimeSeriesOptions("timestamp")); + + db.createCollection("weather", options); + return "OK"; +}); +---- + +.Create a Time Series Collection with CollectionOptions +[code, java] +---- +template.createCollection("weather", CollectionOptions.timeSeries("timestamp")); +---- + +.Create a Time Series Collection derived from an Annotation +[code, java] +---- +@TimeSeries(collection="weather", timeField = "timestamp") +public class Measurement { + + String id; + Instant timestamp; + // ... +} + +template.createCollection(Measurement.class); +---- +==== + +The snippets above can easily be transferred to the reactive API offering the very same methods. +Just make sure to _subscribe_. + From f00991dc293dceee172b1ece6613dde599a0665d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 16 Jul 2021 09:41:16 +0200 Subject: [PATCH 018/885] Polishing. Rename Granularities/Granularity to Granularity and GranularityDefinition to provide a more natural wording towards using predefined granularities. Validate presence of referenced properties through the TimeSeries annotation. Tweak Javadoc, reformat code, add unit tests. See #3731 Original pull request: #3732.
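To illustrate the validation described above: deriving collection options for a type whose time or meta field does not resolve to a persistent property now fails fast with a MappingException. The sketch below mirrors the new EntityOperationsUnitTests added in this change; the Sensor type and its "station" meta field are made-up example names and are not part of the codebase.

	@TimeSeries(timeField = "time", metaField = "station") // "station" is not a property of Sensor
	static class Sensor {

		String id;
		Instant time;
	}

	EntityOperations operations = new EntityOperations(new MongoMappingContext());

	// throws MappingException: "Meta field 'station' does not exist in type ...Sensor"
	operations.forType(Sensor.class).getCollectionOptions();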
--- .../data/mongodb/core/CollectionOptions.java | 24 ++++---- .../data/mongodb/core/EntityOperations.java | 16 ++++- .../data/mongodb/core/MongoTemplate.java | 58 +++++++++--------- .../mongodb/core/ReactiveMongoTemplate.java | 22 +++---- .../data/mongodb/core/mapping/TimeSeries.java | 17 +++--- .../mongodb/core/timeseries/Granularity.java | 24 +++++++- ...rities.java => GranularityDefinition.java} | 24 +------- .../core/EntityOperationsUnitTests.java | 60 +++++++++++++++++++ .../mongodb/core/MongoTemplateUnitTests.java | 26 +++++--- .../core/ReactiveMongoTemplateUnitTests.java | 24 ++++---- src/main/asciidoc/reference/time-series.adoc | 15 ++--- 11 files changed, 198 insertions(+), 112 deletions(-) rename spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/{Granularities.java => GranularityDefinition.java} (66%) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index 3e509e54f2..edff52bb74 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -20,8 +20,8 @@ import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; -import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.timeseries.GranularityDefinition; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.util.Optionals; import org.springframework.lang.Nullable; @@ -100,7 +100,7 @@ public static CollectionOptions empty() { /** * Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use * {@link #timeSeries(TimeSeriesOptions)}. - * + * * @param timeField The name of the property which contains the date in each time series document. Must not be * {@literal null}. * @return new instance of {@link CollectionOptions}. @@ -454,12 +454,13 @@ public static class TimeSeriesOptions { private final String timeField; - @Nullable // - private String metaField; + private @Nullable final String metaField; + + private final GranularityDefinition granularity; - private Granularity granularity; + private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) { - private TimeSeriesOptions(String timeField, @Nullable String metaField, Granularity granularity) { + Assert.hasText(timeField, "Time field must not be empty or null!"); this.timeField = timeField; this.metaField = metaField; @@ -475,7 +476,7 @@ private TimeSeriesOptions(String timeField, @Nullable String metaField, Granular * @return new instance of {@link TimeSeriesOptions}. 
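	 *
	 * A usage sketch (illustrative values only, not part of this change): combine the factory with
	 * {@link #metaField(String)} and {@link #granularity(GranularityDefinition)} and hand the result to
	 * {@code CollectionOptions}:
	 *
	 * CollectionOptions.empty().timeSeries(
	 * 		TimeSeriesOptions.timeSeries("timestamp").metaField("meta").granularity(Granularity.HOURS));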
*/ public static TimeSeriesOptions timeSeries(String timeField) { - return new TimeSeriesOptions(timeField, null, Granularities.DEFAULT); + return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT); } /** @@ -492,12 +493,13 @@ public TimeSeriesOptions metaField(String metaField) { } /** - * Select the {@link Granularity} parameter to define how data in the time series collection is organized. Select - * one that is closest to the time span between incoming measurements. + * Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized. + * Select one that is closest to the time span between incoming measurements. * * @return new instance of {@link TimeSeriesOptions}. + * @see Granularity */ - public TimeSeriesOptions granularity(Granularity granularity) { + public TimeSeriesOptions granularity(GranularityDefinition granularity) { return new TimeSeriesOptions(timeField, metaField, granularity); } @@ -520,7 +522,7 @@ public String getMetaField() { /** * @return never {@literal null}. */ - public Granularity getGranularity() { + public GranularityDefinition getGranularity() { return granularity; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java index 9fb8836e1a..3bba17aaef 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -38,7 +38,7 @@ import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -898,11 +898,23 @@ public CollectionOptions getCollectionOptions() { if (entity.isAnnotationPresent(TimeSeries.class)) { TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + + if (entity.getPersistentProperty(timeSeries.timeField()) == null) { + throw new MappingException(String.format("Time series field '%s' does not exist in type %s", + timeSeries.timeField(), entity.getName())); + } + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); if (StringUtils.hasText(timeSeries.metaField())) { + + if (entity.getPersistentProperty(timeSeries.metaField()) == null) { + throw new MappingException( + String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName())); + } + options = options.metaField(timeSeries.metaField()); } - if (!Granularities.DEFAULT.equals(timeSeries.granularity())) { + if (!Granularity.DEFAULT.equals(timeSeries.granularity())) { options = options.granularity(timeSeries.granularity()); } collectionOptions = collectionOptions.timeSeries(options); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index c833e511bf..fb0780c5c8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -99,7 +99,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -2436,14 +2436,15 @@ protected MongoCollection doCreateCollection(String collectionName, Do co.validationOptions(options); } - if(collectionOptions.containsKey("timeseries")) { + if (collectionOptions.containsKey("timeseries")) { Document timeSeries = collectionOptions.get("timeseries", Document.class); - com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(timeSeries.getString("timeField")); - if(timeSeries.containsKey("metaField")) { + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions( + timeSeries.getString("timeField")); + if (timeSeries.containsKey("metaField")) { options.metaField(timeSeries.getString("metaField")); } - if(timeSeries.containsKey("granularity")) { + if (timeSeries.containsKey("granularity")) { options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); } co.timeSeriesOptions(options); @@ -2604,17 +2605,18 @@ protected Document convertToDocument(@Nullable CollectionOptions collectionOptio collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() // .ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType)))); - collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions) + .ifPresent(it -> { - Document timeseries = new Document("timeField", it.getTimeField()); - if(StringUtils.hasText(it.getMetaField())) { - timeseries.append("metaField", it.getMetaField()); - } - if(!Granularities.DEFAULT.equals(it.getGranularity())) { - timeseries.append("granularity", it.getGranularity().name().toLowerCase()); - } - doc.put("timeseries", timeseries); - }); + Document timeseries = new Document("timeField", it.getTimeField()); + if (StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); } return doc; @@ -2849,9 +2851,9 @@ private void executeQueryInternal(CollectionCallback> col .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) .iterator()) { - while (cursor.hasNext()) { - callbackHandler.processDocument(cursor.next()); - } + while (cursor.hasNext()) { + callbackHandler.processDocument(cursor.next()); + } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -3175,17 +3177,17 @@ private class ReadDocumentCallback implements DocumentCallback { public T doWith(Document document) { - maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); - T entity = reader.read(type, 
document); + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); + T entity = reader.read(type, document); - if (entity == null) { - throw new MappingException(String.format("EntityReader %s returned null", reader)); - } + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); + } - maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); - entity = maybeCallAfterConvert(entity, document, collectionName); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + entity = maybeCallAfterConvert(entity, document, collectionName); - return entity; + return entity; } } @@ -3237,8 +3239,8 @@ public T doWith(Document document) { Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity; - maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName)); - return (T) maybeCallAfterConvert(result, document, collectionName); + maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName)); + return (T) maybeCallAfterConvert(result, document, collectionName); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 2403e9a394..82a3d12260 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -17,7 +17,6 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; -import org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuple2; @@ -111,6 +110,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -975,7 +975,8 @@ public Flux aggregate(Aggregation aggregation, String collectionName, Cla return doAggregate(aggregation, collectionName, null, outputType); } - protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, Class outputType) { + protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType) { Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); Assert.hasText(collectionName, "Collection name must not be null or empty!"); @@ -987,19 +988,18 @@ protected Flux doAggregate(Aggregation aggregation, String collectionName AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName); + LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()), + collectionName); } ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); - return 
execute(collectionName, - collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), ctx.isOutOrMerge(), options, - readCallback, - ctx.getInputType())); + return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), + ctx.isOutOrMerge(), options, readCallback, ctx.getInputType())); } private Flux aggregateAndMap(MongoCollection collection, List pipeline, - boolean isOutOrMerge, - AggregationOptions options, ReadDocumentCallback readCallback, @Nullable Class inputType) { + boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback readCallback, + @Nullable Class inputType) { AggregatePublisher cursor = collection.aggregate(pipeline, Document.class) .allowDiskUse(options.isAllowDiskUse()); @@ -2510,10 +2510,10 @@ protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Col TimeSeriesOptions options = new TimeSeriesOptions(it.getTimeField()); - if(StringUtils.hasText(it.getMetaField())) { + if (StringUtils.hasText(it.getMetaField())) { options.metaField(it.getMetaField()); } - if(!Granularities.DEFAULT.equals(it.getGranularity())) { + if (!Granularity.DEFAULT.equals(it.getGranularity())) { options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java index 8a5fe255e0..d3f694f539 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java @@ -22,7 +22,7 @@ import java.lang.annotation.Target; import org.springframework.core.annotation.AliasFor; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; /** * Identifies a domain object to be persisted to a MongoDB Time Series collection. @@ -50,8 +50,9 @@ String collection() default ""; /** - * The name of the property which contains the date in each time series document.
- * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * Name of the property which contains the date in each time series document.
+ * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. * * @return never {@literal null}. */ @@ -60,19 +61,19 @@ /** * The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor * {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}.
- * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. * * @return empty {@link String} by default. */ String metaField() default ""; /** - * Select the {@link Granularities granularity} parameter to define how data in the time series collection is - * organized. + * Select the {@link Granularity granularity} parameter to define how data in the time series collection is organized. * - * @return {@link Granularities#DEFAULT server default} by default. + * @return {@link Granularity#DEFAULT server default} by default. */ - Granularities granularity() default Granularities.DEFAULT; + Granularity granularity() default Granularity.DEFAULT; /** * Defines the collation to apply when executing a query or creating indexes. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java index c8fe496adb..30ae007fc6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java @@ -16,12 +16,30 @@ package org.springframework.data.mongodb.core.timeseries; /** - * The Granularity of time series data that is closest to the time span between incoming measurements. + * {@link GranularityDefinition Granularities} available for Time Series data. * * @author Christoph Strobl * @since 3.3 */ -public interface Granularity { +public enum Granularity implements GranularityDefinition { - String name(); + /** + * Server default value to indicate no explicit value should be sent. + */ + DEFAULT, + + /** + * High frequency ingestion. + */ + SECONDS, + + /** + * Medium frequency ingestion. + */ + MINUTES, + + /** + * Low frequency ingestion. + */ + HOURS } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java similarity index 66% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java index f4cac5232c..06f77cb594 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java @@ -16,30 +16,12 @@ package org.springframework.data.mongodb.core.timeseries; /** - * {@link Granularity Granularities} available for Time Series data. + * The Granularity of time series data that is closest to the time span between incoming measurements. * * @author Christoph Strobl * @since 3.3 */ -public enum Granularities implements Granularity { +public interface GranularityDefinition { - /** - * Server default value to indicate no explicit value should be sent. - */ - DEFAULT, - - /** - * High frequency ingestion. - */ - SECONDS, - - /** - * Medium frequency ingestion. - */ - MINUTES, - - /** - * Low frequency ingestion. 
- */ - HOURS + String name(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java new file mode 100644 index 0000000000..901ac1f0dd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; + +/** + * Unit tests for {@link EntityOperations}. + * + * @author Mark Paluch + */ +class EntityOperationsUnitTests { + + EntityOperations operations = new EntityOperations(new MongoMappingContext()); + + @Test // GH-3731 + void shouldReportInvalidTimeField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidTimeField.class).getCollectionOptions()) + .withMessageContaining("Time series field 'foo' does not exist"); + } + + @Test // GH-3731 + void shouldReportInvalidMetaField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidMetaField.class).getCollectionOptions()) + .withMessageContaining("Meta field 'foo' does not exist"); + } + + @TimeSeries(timeField = "foo") + static class InvalidTimeField { + + } + + @TimeSeries(timeField = "time", metaField = "foo") + static class InvalidMetaField { + Instant time; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index cc215c956c..147d2e49c3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -19,7 +19,6 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.*; -import com.mongodb.client.model.*; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; @@ -101,7 +100,7 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.lang.Nullable; import 
org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -121,6 +120,16 @@ import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.MapReduceAction; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; @@ -1982,7 +1991,8 @@ void shouldIncludeValueFromNestedShardKeyPath() { ArgumentCaptor filter = ArgumentCaptor.forClass(Bson.class); verify(collection).replaceOne(filter.capture(), any(), any()); - assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname")); + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname")); } @Test // DATAMONGO-2341 @@ -2272,7 +2282,8 @@ void createCollectionShouldSetUpTimeSeries() { verify(db).createCollection(any(), options.capture()); assertThat(options.getValue().getTimeSeriesOptions().toString()) - .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); } class AutogenerateableId { @@ -2370,7 +2381,7 @@ static class Sith { @Field("firstname") String name; } - @Sharded(shardKey = {"value", "nested.customName"}) + @Sharded(shardKey = { "value", "nested.customName" }) static class WithShardKeyPointingToNested { String id; String value; @@ -2384,13 +2395,12 @@ static class TimeSeriesTypeWithDefaults { Instant timestamp; } - @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularities.HOURS) + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) static class TimeSeriesType { String id; - @Field("time_stamp") - Instant timestamp; + @Field("time_stamp") Instant timestamp; Object meta; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index 17fde7ec32..10e4f1cfcc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -20,15 +20,9 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import com.mongodb.client.model.TimeSeriesGranularity; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesType; -import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesTypeWithDefaults; 
-import org.springframework.data.mongodb.core.convert.MongoCustomConversions; -import org.springframework.data.mongodb.core.mapping.TimeSeries; -import org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; @@ -77,9 +71,11 @@ import org.springframework.data.mongodb.core.aggregation.Fields; import org.springframework.data.mongodb.core.aggregation.SetOperation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; @@ -93,6 +89,7 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -106,6 +103,7 @@ import com.mongodb.client.model.FindOneAndReplaceOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.TimeSeriesGranularity; import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.InsertManyResult; @@ -951,7 +949,8 @@ void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForF @Test // DATAMONGO-2344, DATAMONGO-2572 void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindDistinct() { - template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class).subscribe(); + template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class) + .subscribe(); verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); } @@ -1428,8 +1427,7 @@ void insertErrorsOnPublisher() { Publisher publisher = Mono.just("data"); - assertThatExceptionOfType(IllegalArgumentException.class) - .isThrownBy(() -> template.insert(publisher)); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.insert(publisher)); } @Test // GH-3731 @@ -1453,7 +1451,8 @@ void createCollectionShouldSetUpTimeSeries() { verify(db).createCollection(any(), options.capture()); assertThat(options.getValue().getTimeSeriesOptions().toString()) - .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); } private void stubFindSubscribe(Document document) { @@ -1520,13 +1519,12 @@ static class TimeSeriesTypeWithDefaults { Instant timestamp; } - @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = 
Granularities.HOURS) + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) static class TimeSeriesType { String id; - @Field("time_stamp") - Instant timestamp; + @Field("time_stamp") Instant timestamp; Object meta; } diff --git a/src/main/asciidoc/reference/time-series.adoc b/src/main/asciidoc/reference/time-series.adoc index ac36e4026e..54601a8ed1 100644 --- a/src/main/asciidoc/reference/time-series.adoc +++ b/src/main/asciidoc/reference/time-series.adoc @@ -1,13 +1,14 @@ [[time-series]] == Time Series -MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections optimized to efficiently store sequences of measurements. -Those collections need to be actively created before inserting any data. This can be done by manually executing the command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below. +MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections that are optimized to efficiently store documents over time such as measurements or events. +Those collections need to be created as such before inserting any data. +Collections can be created by either running the `createCollection` command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below. .Create a Time Series Collection ==== .Create a Time Series via the MongoDB Driver -[code, java] +[code,java] ---- template.execute(db -> { @@ -19,14 +20,14 @@ template.execute(db -> { }); ---- -.Create a Time Series Collection with CollectionOptions -[code, java] +.Create a Time Series Collection with `CollectionOptions` +[code,java] ---- template.createCollection("weather", CollectionOptions.timeSeries("timestamp")); ---- .Create a Time Series Collection derived from an Annotation -[code, java] +[code,java] ---- @TimeSeries(collection="weather", timeField = "timestamp") public class Measurement { @@ -41,5 +42,5 @@ template.createCollection(Measurement.class); ==== The snippets above can easily be transferred to the reactive API offering the very same methods. -Just make sure to _subscribe_. +Make sure to properly _subscribe_ to the returned publishers. From 9db9d16cf8348db8ab86e9edf6b48dd2ab367db3 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 10:18:49 +0200 Subject: [PATCH 019/885] Updated changelog. See #3681 --- src/main/resources/changelog.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index 18ac26a430..929482612f 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,12 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.1.11 (2021-07-16) +-------------------------------------- +* #3689 - Fix Regression in generating queries with nested maps with numeric keys. +* #3688 - Multiple maps with numeric keys in a single update produces the wrong query (Regression). + + Changes in version 3.2.2 (2021-06-22) ------------------------------------- * #3677 - Add missing double quote to GeoJson.java JSDoc header. @@ -3464,5 +3470,6 @@ Repository + From e875f9ea334567408b01fb0233b5854fc035ee93 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:08:43 +0200 Subject: [PATCH 020/885] Updated changelog. 
See #3631 --- src/main/resources/changelog.txt | 35 ++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index 929482612f..dff169e6fc 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,40 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.3.0-M1 (2021-07-16) +---------------------------------------- +* #3731 - Add support for creating time series collection. +* #3706 - Upgrade to MongoDB 4.3.0 Drivers. +* #3705 - Adapt to changes in AssertJ 3.20. +* #3702 - `MappingMongoConverter` incorrectly processes an object property of type `org.bson.Document`. +* #3700 - Adapt to consolidated PersistentEntity API. +* #3693 - Upgrade to MongoDB 4.3.0-beta4 Drivers. +* #3689 - Fix Regression in generating queries with nested maps with numeric keys. +* #3688 - Multiple maps with numeric keys in a single update produces the wrong query (Regression). +* #3686 - reading a document with a list with a null element fails with Spring Data Mongo 3.2.2, works with 3.2.1. +* #3684 - Add equals and hashcode to UnwrappedMongoPersistentProperty (fixes #3683). +* #3683 - Memory Leak: instances of UnwrappedMongoPersistentProperty are accumulating in PreferredConstructor.isPropertyParameterCache. +* #3677 - Add missing double quote to GeoJson.java JSDoc header. +* #3674 - Upgrade to Querydsl 5.0. +* #3672 - Directly import JSR305 jar. +* #3670 - `Binary` not deserialized to `byte[]` for property of type `Object`. +* #3668 - Projection on the _id field returns wrong result when using `@MongoId` (MongoDB 4.4). +* #3666 - Documentation references outdated `Mongo` client. +* #3660 - MappingMongoConverter problem: ConversionContext#convert does not try to use custom converters first. +* #3659 - [3.2.1] Indexing Class with Custom Converter -> Couldn't find PersistentEntity for property private [...]. +* #3656 - Fix Build on Java 16. +* #3648 - Inconsistent nullability of read() in templates: not sure if mapper can skip a document. +* #3638 - Introduce template method for easier customization of fragments. +* #3635 - $floor isOrOrNor() return true. +* #3633 - NPE in QueryMapper when use Query with `null` as value. +* #3632 - Fix bullet points in aggregations framework asciidoc. +* #3603 - Update CI to Java 16. +* #3602 - Add support for flexible document references. +* #3543 - Aggregation query method should be able to return `Slice` and `Stream`. +* #3407 - Add an option to @Field annotation to control property write rules [DATAMONGO-2551]. +* #3225 - Add support for Wildcard Indexes [DATAMONGO-2368]. + + Changes in version 3.1.11 (2021-07-16) -------------------------------------- * #3689 - Fix Regression in generating queries with nested maps with numeric keys. @@ -3471,5 +3505,6 @@ Repository + From b6ad32d7d4dd6bd6433695425349c282337c77ee Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:08:59 +0200 Subject: [PATCH 021/885] Prepare 3.3 M1 (2021.1.0). 
See #3631 --- pom.xml | 8 ++++---- src/main/resources/notice.txt | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index b688f3ee50..382f4470c9 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-SNAPSHOT + 2.6.0-M1 @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-SNAPSHOT + 2.6.0-M1 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-libs-milestone + https://repo.spring.io/libs-milestone sonatype-libs-snapshot diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index 07cf1da6a0..624bf48c30 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,4 +1,4 @@ -Spring Data MongoDB 3.2 GA (2021.0.0) +Spring Data MongoDB 3.3 M1 (2021.1.0) Copyright (c) [2010-2019] Pivotal Software, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). @@ -27,3 +27,4 @@ conditions of the subcomponent's license, as noted in the LICENSE file. + From 4ef1ff6aff2deffb5daa3b8910c4f98ca789660f Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:09:25 +0200 Subject: [PATCH 022/885] Release version 3.3 M1 (2021.1.0). See #3631 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 382f4470c9..9a83ef35ef 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0033bd11d5..0557a133c7 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f62c8dc7f4..d7fbc46ed6 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 1f157e75bc..6bb6e8ca56 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 ../pom.xml From e7f3a2436d24262b587f0c4fc2aa8ae371e688d3 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:19:56 +0200 Subject: [PATCH 023/885] Prepare next development iteration. 
See #3631 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 9a83ef35ef..382f4470c9 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0557a133c7..0033bd11d5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index d7fbc46ed6..f62c8dc7f4 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 6bb6e8ca56..1f157e75bc 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT ../pom.xml From 4d7ee0e7415be66d59068d5dbfbbb595530acfaa Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:19:58 +0200 Subject: [PATCH 024/885] After release cleanups. See #3631 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 382f4470c9..b688f3ee50 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-M1 + 2.6.0-SNAPSHOT @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-M1 + 2.6.0-SNAPSHOT 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-milestone - https://repo.spring.io/libs-milestone + spring-libs-snapshot + https://repo.spring.io/libs-snapshot sonatype-libs-snapshot From d2c9b47366aca3da4acbd0221f4c076fb81c0b61 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 09:03:10 +0200 Subject: [PATCH 025/885] Fix issues related to Querydsl 5.0 upgrade. Remove overridden methods no longer available in public api. Closes: #3738 --- .../support/QuerydslAbstractMongodbQuery.java | 10 ---------- .../support/SpringDataMongodbQuerySupport.java | 5 ----- 2 files changed, 15 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java index b255d20273..80c485ea3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java @@ -177,16 +177,6 @@ protected Document createSort(List> orderSpecifiers) { return serializer.toSort(orderSpecifiers); } - /** - * Get the actual {@link QueryMixin} delegate. - * - * @return - */ - QueryMixin getQueryMixin() { - return queryMixin; - } - - /** * Returns the {@literal Mongo Shell} representation of the query.
* The following query diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java index 406019cf4d..be9260df48 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java @@ -139,9 +139,4 @@ public String toJson(JsonWriterSettings settings) { protected Document createSort(List> orderSpecifiers) { return serializer.toSort(orderSpecifiers); } - - // TODO: Remove once https://github.com/querydsl/querydsl/pull/2916 is merged - QueryMixin getQueryMixin() { - return superQueryMixin; - } } From 68370c16fb288034f68b2ca2d38f5480e1656e2f Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 26 Jul 2021 14:14:31 +0200 Subject: [PATCH 026/885] =?UTF-8?q?Run=20unpaged=20query=20using=20Pageabl?= =?UTF-8?q?e.unpaged()=20through=20QuerydslMongoPredicateExecutor.findAll(?= =?UTF-8?q?=E2=80=A6).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We now correctly consider unpaged queries if the Pageable is unpaged. Closes: #3751 Original Pull Request: #3754 --- .../support/QuerydslMongoPredicateExecutor.java | 4 ++++ ...slMongoPredicateExecutorIntegrationTests.java | 16 ++++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java index 95d2299670..d92d7ad129 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java @@ -212,6 +212,10 @@ private SpringDataMongodbQuery createQuery() { */ private SpringDataMongodbQuery applyPagination(SpringDataMongodbQuery query, Pageable pageable) { + if (pageable.isUnpaged()) { + return query; + } + query = query.offset(pageable.getOffset()).limit(pageable.getPageSize()); return applySorting(query, pageable.getSort()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java index 6b46618fdb..782e46b134 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java @@ -27,6 +27,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.dao.PermissionDeniedDataAccessException; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.MongoDatabaseFactory; 
@@ -122,6 +124,20 @@ public void findUsingAndShouldWork() { .containsExactly(dave); } + @Test // GH-3751 + public void findPage() { + + assertThat(repository + .findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())), + PageRequest.of(0, 10)) + .getContent()).containsExactly(dave); + + assertThat(repository + .findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())), + Pageable.unpaged()) + .getContent()).containsExactly(dave); + } + @Test // DATAMONGO-362, DATAMONGO-1848 public void springDataMongodbQueryShouldAllowJoinOnDBref() { From 45971b212c12c67e4233d1b139de06ba088e18ee Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 26 Jul 2021 14:15:24 +0200 Subject: [PATCH 027/885] Polishing. Move off deprecated classes. Add unpaged testcase for query by example. Original Pull Request: #3754 --- .../support/QuerydslMongoPredicateExecutor.java | 2 +- .../support/SimpleMongoRepository.java | 2 +- .../support/SimpleMongoRepositoryTests.java | 16 +++++++++++++++- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java index d92d7ad129..569273afb5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java @@ -27,7 +27,7 @@ import org.springframework.data.querydsl.EntityPathResolver; import org.springframework.data.querydsl.QuerydslPredicateExecutor; import org.springframework.data.querydsl.SimpleEntityPathResolver; -import org.springframework.data.repository.support.PageableExecutionUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.util.Assert; import com.querydsl.core.NonUniqueResultException; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java index 4ffba8a6a3..1443474b8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java @@ -36,7 +36,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import org.springframework.data.repository.support.PageableExecutionUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.data.util.StreamUtils; import org.springframework.data.util.Streamable; import org.springframework.lang.Nullable; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java index 135b6b3888..61cd78ea93 100755 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java @@ -30,10 +30,11 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; + import org.springframework.data.domain.Example; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.ExampleMatcher.*; +import org.springframework.data.domain.Pageable; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.MongoTransactionManager; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; @@ -177,6 +178,19 @@ void findByExampleShouldLookUpEntriesCorrectly() { assertThat(result.getTotalPages()).isEqualTo(1); } + @Test // GH-3751 + void findByExampleShouldReturnUnpagedResults() { + + Person sample = new Person(); + sample.setLastname("Matthews"); + trimDomainType(sample, "id", "createdAt", "email"); + + Page result = repository.findAll(Example.of(sample), Pageable.unpaged()); + + assertThat(result.getContent()).hasSize(2).contains(dave, oliver); + assertThat(result.getTotalPages()).isEqualTo(1); + } + @Test // DATAMONGO-1464 void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() { From 454afd9877b6a7d2c164461f766a294cd907141f Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:02:56 +0200 Subject: [PATCH 028/885] Prepare 3.3 M2 (2021.1.0). See #3736 --- pom.xml | 8 ++++---- src/main/resources/notice.txt | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index b688f3ee50..b6477961a1 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-SNAPSHOT + 2.6.0-M2 @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-SNAPSHOT + 2.6.0-M2 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-libs-milestone + https://repo.spring.io/libs-milestone sonatype-libs-snapshot diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index 624bf48c30..29628c3570 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,4 +1,4 @@ -Spring Data MongoDB 3.3 M1 (2021.1.0) +Spring Data MongoDB 3.3 M2 (2021.1.0) Copyright (c) [2010-2019] Pivotal Software, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). @@ -27,4 +27,5 @@ conditions of the subcomponent's license, as noted in the LICENSE file. + From 87ab1ac48ceaec1f014a6f50148cb6b4d9c20544 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:03:17 +0200 Subject: [PATCH 029/885] Release version 3.3 M2 (2021.1.0). 
See #3736 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index b6477961a1..ff938bcceb 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0033bd11d5..31260b443f 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f62c8dc7f4..8e0bf96e21 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 1f157e75bc..42bf7aa081 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 ../pom.xml From 828c07416794d75c8e41753a53e1c2b3d96ac565 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:16:21 +0200 Subject: [PATCH 030/885] Prepare next development iteration. See #3736 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index ff938bcceb..b6477961a1 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 31260b443f..0033bd11d5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 8e0bf96e21..f62c8dc7f4 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 42bf7aa081..1f157e75bc 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT ../pom.xml From 7538b1a1a57a6fb4d1a2cfc54fda095a5d29ee8e Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:16:23 +0200 Subject: [PATCH 031/885] After release cleanups. 
See #3736 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index b6477961a1..b688f3ee50 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-M2 + 2.6.0-SNAPSHOT @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-M2 + 2.6.0-SNAPSHOT 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-milestone - https://repo.spring.io/libs-milestone + spring-libs-snapshot + https://repo.spring.io/libs-snapshot sonatype-libs-snapshot From 1d943d62a370267244cdabbb04160418532671fe Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 13:10:15 +0200 Subject: [PATCH 032/885] Fix build on Java 16. Make sure to use an initialized MappingContext. Closes: #3749 Original pull request: #3752. --- .../core/EntityOperationsUnitTests.java | 3 +- .../data/mongodb/core/MongoTemplateTests.java | 2 +- .../test/util/MappingContextConfigurer.java | 50 ++++++++++++ .../test/util/MongoConverterConfigurer.java | 40 ++++++++++ .../test/util/MongoTestMappingContext.java | 78 +++++++++++++++++++ .../mongodb/test/util/MongoTestTemplate.java | 2 +- .../util/MongoTestTemplateConfiguration.java | 51 +----------- .../test/util/ReactiveMongoTestTemplate.java | 2 +- 8 files changed, 174 insertions(+), 54 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java index 901ac1f0dd..160a598bc7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java @@ -24,6 +24,7 @@ import org.springframework.data.mapping.MappingException; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; /** * Unit tests for {@link EntityOperations}. 
@@ -32,7 +33,7 @@ */ class EntityOperationsUnitTests { - EntityOperations operations = new EntityOperations(new MongoMappingContext()); + EntityOperations operations = new EntityOperations(MongoTestMappingContext.newTestContext()); @Test // GH-3731 void shouldReportInvalidTimeField() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index f5521008f8..28cdaa4830 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -138,7 +138,7 @@ public class MongoTemplateTests { cfg.configureMappingContext(it -> { it.autocreateIndex(false); - it.intitalEntitySet(AuditablePerson.class); + it.initialEntitySet(AuditablePerson.class); }); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java new file mode 100644 index 0000000000..75169f5e45 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java @@ -0,0 +1,50 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.lang.Nullable; + +/** + * Utility to configure {@link org.springframework.data.mongodb.core.mapping.MongoMappingContext} properties. + * + * @author Christoph Strobl + */ +public class MappingContextConfigurer { + + private @Nullable Set> intitalEntitySet; + boolean autocreateIndex = false; + + public void autocreateIndex(boolean autocreateIndex) { + this.autocreateIndex = autocreateIndex; + } + + public void initialEntitySet(Set> initialEntitySet) { + this.intitalEntitySet = initialEntitySet; + } + + public void initialEntitySet(Class... initialEntitySet) { + this.intitalEntitySet = new HashSet<>(Arrays.asList(initialEntitySet)); + } + + Set> initialEntitySet() { + return intitalEntitySet != null ? intitalEntitySet : Collections.emptySet(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java new file mode 100644 index 0000000000..7129d9951a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java @@ -0,0 +1,40 @@ +/* + * Copyright 2021 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; + +/** + * Utility to configure {@link MongoCustomConversions}. + * + * @author Christoph Strobl + */ +public class MongoConverterConfigurer { + + CustomConversions customConversions; + + public void customConversions(CustomConversions customConversions) { + this.customConversions = customConversions; + } + + public void customConverters(Converter... converters) { + customConversions(new MongoCustomConversions(Arrays.asList(converters))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java new file mode 100644 index 0000000000..f9701d24aa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java @@ -0,0 +1,78 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.util.Collections; +import java.util.function.Consumer; + +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +/** + * @author Christoph Strobl + */ +public class MongoTestMappingContext extends MongoMappingContext { + + private MappingContextConfigurer contextConfigurer; + private MongoConverterConfigurer converterConfigurer; + + public static MongoTestMappingContext newTestContext() { + return new MongoTestMappingContext(conig -> {}).init(); + } + + public MongoTestMappingContext(MappingContextConfigurer contextConfig) { + + this.contextConfigurer = contextConfig; + this.converterConfigurer = new MongoConverterConfigurer(); + } + + public MongoTestMappingContext(Consumer contextConfig) { + + this(new MappingContextConfigurer()); + contextConfig.accept(contextConfigurer); + } + + public MongoTestMappingContext customConversions(MongoConverterConfigurer converterConfig) { + + this.converterConfigurer = converterConfig; + return this; + } + + public MongoTestMappingContext customConversions(Consumer converterConfig) { + + converterConfig.accept(converterConfigurer); + return this; + } + + public MongoTestMappingContext init() { + + setInitialEntitySet(contextConfigurer.initialEntitySet()); + setAutoIndexCreation(contextConfigurer.autocreateIndex); + if (converterConfigurer.customConversions != null) { + setSimpleTypeHolder(converterConfigurer.customConversions.getSimpleTypeHolder()); + } else { + setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + } + + super.afterPropertiesSet(); + return this; + } + + @Override + public void afterPropertiesSet() { + init(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java index ff1363965d..c612319e55 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java @@ -50,7 +50,7 @@ public MongoTestTemplate(MongoClient client, String database, Class... 
initia cfg.configureMappingContext(it -> { it.autocreateIndex(false); - it.intitalEntitySet(initialEntities); + it.initialEntitySet(initialEntities); }); }); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java index ee75da8b19..b50ff88133 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java @@ -16,20 +16,15 @@ package org.springframework.data.mongodb.test.util; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; import java.util.List; -import java.util.Set; import java.util.function.Consumer; import java.util.function.Function; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.core.convert.converter.Converter; import org.springframework.data.auditing.IsNewAwareAuditingHandler; -import org.springframework.data.convert.CustomConversions; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; @@ -115,16 +110,7 @@ ApplicationContext getApplicationContext() { MongoMappingContext mappingContext() { if (mappingContext == null) { - - mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(mappingContextConfigurer.initialEntitySet()); - mappingContext.setAutoIndexCreation(mappingContextConfigurer.autocreateIndex); - if(mongoConverterConfigurer.customConversions != null) { - mappingContext.setSimpleTypeHolder(mongoConverterConfigurer.customConversions.getSimpleTypeHolder()); - } else { - mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); - } - mappingContext.afterPropertiesSet(); + mappingContext = new MongoTestMappingContext(mappingContextConfigurer).customConversions(mongoConverterConfigurer).init(); } return mappingContext; @@ -222,41 +208,6 @@ public void defaultDb(String defaultDatabase) { } } - public static class MongoConverterConfigurer { - - CustomConversions customConversions; - - public void customConversions(CustomConversions customConversions) { - this.customConversions = customConversions; - } - - public void customConverters(Converter... converters) { - customConversions(new MongoCustomConversions(Arrays.asList(converters))); - } - } - - public static class MappingContextConfigurer { - - Set> intitalEntitySet; - boolean autocreateIndex = false; - - public void autocreateIndex(boolean autocreateIndex) { - this.autocreateIndex = autocreateIndex; - } - - public void intitalEntitySet(Set> intitalEntitySet) { - this.intitalEntitySet = intitalEntitySet; - } - - public void intitalEntitySet(Class... initialEntitySet) { - this.intitalEntitySet = new HashSet<>(Arrays.asList(initialEntitySet)); - } - - Set> initialEntitySet() { - return intitalEntitySet != null ? 
intitalEntitySet : Collections.emptySet(); - } - } - public static class AuditingConfigurer { Function auditingHandlerFunction; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java index 9e7d2bbbfa..774493322e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java @@ -56,7 +56,7 @@ public ReactiveMongoTestTemplate(MongoClient client, String database, Class.. cfg.configureMappingContext(it -> { it.autocreateIndex(false); - it.intitalEntitySet(initialEntities); + it.initialEntitySet(initialEntities); }); }); } From 255491c4468898be440cf51e727c53669e6de9ef Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 23 Aug 2021 09:31:25 +0200 Subject: [PATCH 033/885] Upgrade to MongoDB 4.3.1 Drivers. Closes: #3778 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index b688f3ee50..5d28c8a5c5 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.3.0 + 4.3.1 ${mongo} 1.19 From 23254c10dc7a11b5fe1c7937c889d42baa3fcee0 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 16 Jul 2021 11:37:08 +0200 Subject: [PATCH 034/885] Add support for `$setWindowFields` aggregation stage. Add a SetWindowFieldsOperation to the aggregation framework. The builder API allows fluent declaration of the aggregation stage as shown in the sample below. SetWindowFieldsOperation.builder() .partitionByField("state") .sortBy(Sort.by(Direction.ASC, "date")) .output(AccumulatorOperators.valueOf("qty").sum()) .within(Windows.documents().fromUnbounded().toCurrent().build()) .as("cumulativeQuantityForState") .build(); Closes #3711 Original pull request: #3739. --- .../aggregation/SetWindowFieldsOperation.java | 783 ++++++++++++++++++ .../SetWindowFieldsOperationTests.java | 132 +++ .../SetWindowFieldsOperationUnitTests.java | 111 +++ 3 files changed, 1026 insertions(+) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java new file mode 100644 index 0000000000..0f0909beb2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -0,0 +1,783 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.springframework.data.domain.Sort; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/ + */ +public class SetWindowFieldsOperation + implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation { + + @Nullable // + private Object partitionBy; + + @Nullable // + private AggregationOperation sortBy; + + private WindowOutput output; + + /** + * Create a new {@link SetWindowFieldsOperation} with given args. + * + * @param partitionBy The field or {@link AggregationExpression} to group by. + * @param sortBy the {@link SortOperation operation} to sort the documents by in the partition. + * @param output the {@link WindowOutput} containing the fields to add and the rules to calculate their respective + * values. + */ + public SetWindowFieldsOperation(@Nullable Object partitionBy, @Nullable AggregationOperation sortBy, + WindowOutput output) { + + this.partitionBy = partitionBy; + this.sortBy = sortBy; + this.output = output; + } + + /** + * Obtain a {@link SetWindowFieldsOperationBuilder builder} to create a {@link SetWindowFieldsOperation}. + * + * @return new instance of {@link SetWindowFieldsOperationBuilder}. 
+ */ + public static SetWindowFieldsOperationBuilder builder() { + return new SetWindowFieldsOperationBuilder(); + } + + @Override + public ExposedFields getFields() { + return ExposedFields.nonSynthetic(Fields.from(output.fields.toArray(new Field[0]))); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document $setWindowFields = new Document(); + if (partitionBy != null) { + if (partitionBy instanceof AggregationExpression) { + $setWindowFields.append("partitionBy", ((AggregationExpression) partitionBy).toDocument(context)); + } else if (partitionBy instanceof Field) { + $setWindowFields.append("partitionBy", context.getReference((Field) partitionBy).toString()); + } else { + $setWindowFields.append("partitionBy", partitionBy); + } + } + + if (sortBy != null) { + $setWindowFields.append("sortBy", sortBy.toDocument(context).get(sortBy.getOperator())); + } + + Document output = new Document(); + for (ComputedField field : this.output.fields) { + + Document fieldOperation = field.getWindowOperator().toDocument(context); + if (field.window != null) { + fieldOperation.put("window", field.window.toDocument(context)); + } + output.append(field.getName(), fieldOperation); + } + $setWindowFields.append("output", output); + + return new Document(getOperator(), $setWindowFields); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator() + */ + @Override + public String getOperator() { + return "$setWindowFields"; + } + + /** + * {@link WindowOutput} defines output of {@literal $setWindowFields} stage by defining the {@link ComputedField + * field(s)} to append to the documents in the output. + */ + public static class WindowOutput { + + private List fields; + + /** + * Create a new output containing the single given {@link ComputedField field}. + * + * @param outputField must not be {@literal null}. + */ + public WindowOutput(ComputedField outputField) { + + Assert.notNull(outputField, "OutputField must not be null!"); + this.fields = new ArrayList<>(); + this.fields.add(outputField); + } + + /** + * Append the given {@link ComputedField field} to the outptut. + * + * @param field must not be {@literal null}. + * @return this. + */ + public WindowOutput append(ComputedField field) { + + Assert.notNull(field, "Field must not be null!"); + fields.add(field); + return this; + } + + /** + * Append the given {@link AggregationExpression} as a {@link ComputedField field} in a fluent way. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ComputedFieldAppender}. + * @see #append(ComputedField) + */ + public ComputedFieldAppender append(AggregationExpression expression) { + + return new ComputedFieldAppender() { + + @Nullable private Window window; + + @Override + public WindowOutput as(String fieldname) { + + return WindowOutput.this.append(new ComputedField(fieldname, expression, window)); + } + + @Override + public ComputedFieldAppender within(Window window) { + this.window = window; + return this; + } + }; + } + + /** + * Tiny little helper to allow fluent API usage for {@link #append(ComputedField)}. + */ + interface ComputedFieldAppender { + + /** + * Specify the target field name. + * + * @param fieldname the name of field to add to the target document. + * @return the {@link WindowOutput} that started the append operation. + */ + WindowOutput as(String fieldname); + + /** + * Specify the window boundaries. + * + * @param window must not be {@literal null}. 
+ * @return this. + */ + ComputedFieldAppender within(Window window); + } + } + + /** + * A {@link Field} that the result of a computation done via an {@link AggregationExpression}. + * + * @author Christoph Strobl + */ + public static class ComputedField implements Field { + + private String name; + private AggregationExpression windowOperator; + + @Nullable // + private Window window; + + /** + * Create a new {@link ComputedField}. + * + * @param name the target field name. + * @param windowOperator the expression to calculate the field value. + */ + public ComputedField(String name, AggregationExpression windowOperator) { + this(name, windowOperator, null); + } + + /** + * Create a new {@link ComputedField}. + * + * @param name the target field name. + * @param windowOperator the expression to calculate the field value. + * @param window the boundaries to operate within. Can be {@literal null}. + */ + public ComputedField(String name, AggregationExpression windowOperator, @Nullable Window window) { + + this.name = name; + this.windowOperator = windowOperator; + this.window = window; + } + + @Override + public String getName() { + return name; + } + + @Override + public String getTarget() { + return getName(); + } + + @Override + public boolean isAliased() { + return false; + } + + public AggregationExpression getWindowOperator() { + return windowOperator; + } + + public Window getWindow() { + return window; + } + } + + /** + * Quick access to {@link DocumentWindow documents} and {@literal RangeWindow range} {@link Window windows}. + * + * @author Christoph Strobl + */ + public interface Windows { + + /** + * Create a document window relative to the position of the current document. + * + * @param lower an integer for a position relative to the current document, {@literal current} or + * {@literal unbounded}. + * @param upper an integer for a position relative to the current document, {@literal current} or + * {@literal unbounded}. + * @return new instance of {@link DocumentWindow}. + */ + static DocumentWindow documents(Object lower, Object upper) { + return new DocumentWindow(lower, upper); + } + + /** + * Create a range window defined based on sort expression. + * + * @param lower a numeric value to add the sort by field value of the current document, {@literal current} or + * {@literal unbounded}. + * @param upper a numeric value to add the sort by field value of the current document, {@literal current} or + * {@literal unbounded}. + * @return new instance of {@link RangeWindow}. + */ + static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) { + return new RangeWindow(lower, upper, unit); + } + + /** + * Create a range window based on the {@link Sort sort value} of the current document via a fluent API. + * + * @return new instance of {@link RangeWindowBuilder}. + */ + static RangeWindowBuilder range() { + return new RangeWindowBuilder(); + } + + /** + * Create a document window relative to the position of the current document via a fluent API. + * + * @return new instance of {@link DocumentWindowBuilder}. + */ + static DocumentWindowBuilder documents() { + return new DocumentWindowBuilder(); + } + } + + /** + * A {@link Window} to be used for {@link ComputedField#getWindow() ComputedField}. + */ + public interface Window { + + /** + * The upper (inclusive) boundary. + * + * @return + */ + Object getUpper(); + + /** + * The lower (inclusive) boundary. 
+	 * + * @return + */ + Object getLower(); + + /** + * Obtain the document representation of the window in a default {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } + + /** + * Obtain the document representation of the window in the given {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + Document toDocument(AggregationOperationContext ctx); + } + + /** + * Builder API for a {@link RangeWindow}. + * + * @author Christoph Strobl + */ + public static class RangeWindowBuilder { + + @Nullable // + private Object upper; + + @Nullable // + private Object lower; + + @Nullable // + private WindowUnit unit; + + /** + * The upper (inclusive) range limit based on the sortBy field. + * + * @param upper eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder to(String upper) { + + this.upper = upper; + return this; + } + + /** + * The lower (inclusive) range limit based on the sortBy field. + * + * @param lower eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder from(String lower) { + + this.lower = lower; + return this; + } + + /** + * The upper (inclusive) range limit value to add to the value based on the sortBy field. + * + * @param upper + * @return this. + */ + public RangeWindowBuilder to(Number upper) { + + this.upper = upper; + return this; + } + + /** + * The lower (inclusive) range limit value to add to the value based on the sortBy field. + * + * @param lower + * @return this. + */ + public RangeWindowBuilder from(Number lower) { + + this.lower = lower; + return this; + } + + /** + * Use {@literal current} as {@link #from(String) lower} limit. + * + * @return this. + */ + public RangeWindowBuilder fromCurrent() { + return from("current"); + } + + /** + * Use {@literal unbounded} as {@link #from(String) lower} limit. + * + * @return this. + */ + public RangeWindowBuilder fromUnbounded() { + return from("unbounded"); + } + + /** + * Use {@literal current} as {@link #to(String) upper} limit. + * + * @return this. + */ + public RangeWindowBuilder toCurrent() { + return to("current"); + } + + /** + * Use {@literal unbounded} as {@link #to(String) upper} limit. + * + * @return this. + */ + public RangeWindowBuilder toUnbounded() { + return to("unbounded"); + } + + /** + * Set the {@link WindowUnit unit} or measure for the given {@link Window}. + * + * @param windowUnit must not be {@literal null}. Can be one of {@link WindowUnits}. + * @return this. + */ + public RangeWindowBuilder unit(WindowUnit windowUnit) { + + this.unit = windowUnit; + return this; + } + + /** + * Build the {@link RangeWindow}. + * + * @return new instance of {@link RangeWindow}. + */ + public RangeWindow build() { + return new RangeWindow(lower, upper, unit); + } + } + + /** + * Builder API for a {@link DocumentWindow}.
+ * + * @author Christoph Strobl + */ + public static class DocumentWindowBuilder { + + @Nullable // + private Object upper; + + @Nullable // + private Object lower; + + public DocumentWindowBuilder from(Number lower) { + + this.lower = lower; + return this; + } + + public DocumentWindowBuilder fromCurrent() { + return from("current"); + } + + public DocumentWindowBuilder fromUnbounded() { + return from("unbounded"); + } + + public DocumentWindowBuilder to(String upper) { + + this.upper = upper; + return this; + } + + /** + * The lower (inclusive) range limit based on current document. + * + * @param lower eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public DocumentWindowBuilder from(String lower) { + + this.lower = lower; + return this; + } + + /** + * The upper (inclusive) range limit based on current document. + * + * @param upper eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public DocumentWindowBuilder to(Number upper) { + + this.upper = upper; + return this; + } + + public DocumentWindowBuilder toCurrent() { + return to("current"); + } + + public DocumentWindowBuilder toUnbounded() { + return to("unbounded"); + } + + public DocumentWindow build() { + return new DocumentWindow(lower, upper); + } + } + + /** + * Common base class for {@link Window} implementation. + * + * @author Christoph Strobl + */ + abstract static class WindowImp implements Window { + + private final Object upper; + private final Object lower; + + protected WindowImp(Object lower, Object upper) { + this.upper = upper; + this.lower = lower; + } + + @Override + public Object getUpper() { + return upper; + } + + @Override + public Object getLower() { + return lower; + } + } + + /** + * {@link Window} implementation based on the current document. + * + * @author Christoph Strobl + */ + public static class DocumentWindow extends WindowImp { + + DocumentWindow(Object lower, Object upper) { + super(lower, upper); + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + return new Document("documents", Arrays.asList(getLower(), getUpper())); + } + } + + /** + * {@link Window} implementation based on the sort fields. + * + * @author Christoph Strobl + */ + public static class RangeWindow extends WindowImp { + + @Nullable // + private WindowUnit unit; + + protected RangeWindow(Object lower, Object upper, WindowUnit unit) { + + super(lower, upper); + this.unit = unit; + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + + Document range = new Document("range", new Object[] { getLower(), getUpper() }); + if (unit != null && !WindowUnits.DEFAULT.equals(unit)) { + range.append("unit", unit.name().toLowerCase()); + } + return range; + } + } + + /** + * The actual time unit to apply to a {@link Window}. + */ + public interface WindowUnit { + String name(); + } + + /** + * Quick access to available {@link WindowUnit units}. + */ + public enum WindowUnits implements WindowUnit { + DEFAULT, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND + } + + /** + * A fluent builder to create a {@link SetWindowFieldsOperation}. + * + * @author Christoph Strobl + */ + public static class SetWindowFieldsOperationBuilder { + + private Object partitionBy; + private SortOperation sortOperation; + private WindowOutput output; + + /** + * Specify the field to group by. + * + * @param fieldName must not be {@literal null}. + * @return this. 
+	 */ + public SetWindowFieldsOperationBuilder partitionByField(String fieldName) { + return partitionBy(Fields.field("$" + fieldName, fieldName)); + } + + /** + * Specify the {@link AggregationExpression expression} to group by. + * + * @param expression must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder partitionByExpression(AggregationExpression expression) { + return partitionBy(expression); + } + + /** + * Sort {@link Sort.Direction#ASC ascending} by the given fields. + * + * @param fields must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(String... fields) { + return sortBy(Sort.by(fields)); + } + + /** + * Set the sort order. + * + * @param sort must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(Sort sort) { + return sortBy(new SortOperation(sort)); + } + + /** + * Set the {@link SortOperation} to use. + * + * @param sort must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) { + + this.sortOperation = sort; + return this; + } + + /** + * Define the actual output computation. + * + * @param output must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder output(WindowOutput output) { + + this.output = output; + return this; + } + + /** + * Add a field capturing the result of the given {@link AggregationExpression expression} to the output. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link WindowChoice}. + */ + public WindowChoice output(AggregationExpression expression) { + + return new WindowChoice() { + + @Nullable private Window window; + + @Override + public As within(Window window) { + + this.window = window; + return this; + } + + @Override + public SetWindowFieldsOperationBuilder as(String targetFieldName) { + + ComputedField computedField = new ComputedField(targetFieldName, expression, window); + + if (SetWindowFieldsOperationBuilder.this.output == null) { + SetWindowFieldsOperationBuilder.this.output = new WindowOutput(computedField); + } else { + SetWindowFieldsOperationBuilder.this.output.append(computedField); + } + + return SetWindowFieldsOperationBuilder.this; + } + }; + } + + /** + * Interface to capture the field name used to hold the computation result. + */ + public interface As { + + /** + * Define the target field name to hold the computation result. + * + * @param targetFieldName must not be {@literal null}. + * @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance. + */ + SetWindowFieldsOperationBuilder as(String targetFieldName); + } + + /** + * Interface to capture an optional {@link Window} applicable to the field computation. + */ + public interface WindowChoice extends As { + + /** + * Specify calculation boundaries. + * + * @param window must not be {@literal null}. + * @return never {@literal null}. + */ + As within(Window window); + + } + + /** + * Partition by a value that translates to a valid MongoDB expression. + * + * @param value must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder partitionBy(Object value) { + + partitionBy = value; + return this; + } + + /** + * Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments. + * + * @return new instance of {@link SetWindowFieldsOperation}.
+ */ + public SetWindowFieldsOperation build() { + return new SetWindowFieldsOperation(partitionBy, sortOperation, output); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java new file mode 100644 index 0000000000..b88e0479a3 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java @@ -0,0 +1,132 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Date; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") +class SetWindowFieldsOperationTests { + + @Template // + private static MongoTestTemplate mongoTemplate; + + @AfterEach + void afterEach() { + mongoTemplate.flush(CakeSale.class); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(162, 282, 427, 134, + 238, 378); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationWithPartitionExpressionCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation 
setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByExpression(Year.yearOf("date")) // resolves to $year: "$orderDate" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(134, 296, 104, 224, + 145, 285); + } + + void initCakeSales() { + + mongoTemplate.execute(CakeSale.class, collection -> { + + List source = Arrays.asList(Document.parse( + "{ _id: 0, type: \"chocolate\", orderDate: { $date : \"2020-05-18T14:10:30Z\" }, state: \"CA\", price: 13, quantity: 120 }"), + Document.parse( + "{ _id: 1, type: \"chocolate\", orderDate: { $date : \"2021-03-20T11:30:05Z\"}, state: \"WA\", price: 14, quantity: 140 }"), + Document.parse( + "{ _id: 2, type: \"vanilla\", orderDate: { $date : \"2021-01-11T06:31:15Z\"}, state: \"CA\", price: 12, quantity: 145 }"), + Document.parse( + "{ _id: 3, type: \"vanilla\", orderDate: { $date : \"2020-02-08T13:13:23Z\"}, state: \"WA\", price: 13, quantity: 104 }"), + Document.parse( + "{ _id: 4, type: \"strawberry\", orderDate: { $date : \"2019-05-18T16:09:01Z\"}, state: \"CA\", price: 41, quantity: 162 }"), + Document.parse( + "{ _id: 5, type: \"strawberry\", orderDate: { $date : \"2019-01-08T06:12:03Z\"}, state: \"WA\", price: 43, quantity: 134 }")); + + collection.insertMany(source); + return "OK"; + }); + } + + @lombok.Data + static class CakeSale { + + @Id Integer id; + + String state; + + @Field("orderDate") // + Date date; + + @Field("quantity") // + Integer qty; + + String type; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java new file mode 100644 index 0000000000..87e3f8f54c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java @@ -0,0 +1,111 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +public class SetWindowFieldsOperationUnitTests { + + @Test // GH-3711 + void rendersTargetFieldNamesCorrectly() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { partitionBy: \"$state\", sortBy: { orderDate: 1 }, output: { cumulativeQuantityForState: { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } } } } }")); + } + + @Test // GH-3711 + void exposesTargetFieldNames() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); // + + assertThat(setWindowFieldsOperation.getFields()).map(ExposedField::getName).containsExactly("f1", "f2"); + } + + @Test // GH-3711 + void rendersMuiltipleOutputFields() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); // + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { output: { f1 : { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } }, f2 : { $avg: \"$quantity\", window: { documents: [ -1, 0 ] } } } } }")); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + 
return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class CakeSale { + + String state; + + @Field("orderDate") Date date; + + @Field("quantity") Integer qty; + + } +} From f9f4c4621be4a8bd03a542caf82b48a528d913db Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 23 Aug 2021 11:16:21 +0200 Subject: [PATCH 035/885] Polishing. Update javadoc and add assertions. See #3711 Original pull request: #3739. --- .../aggregation/SetWindowFieldsOperation.java | 176 ++++++++++-------- .../SetWindowFieldsOperationTests.java | 2 + .../SetWindowFieldsOperationUnitTests.java | 4 +- 3 files changed, 107 insertions(+), 75 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java index 0f0909beb2..9c40a0b642 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -25,6 +25,8 @@ import org.springframework.util.Assert; /** + * Encapsulates the {@code setWindowFields}-operation. + * * @author Christoph Strobl * @since 3.3 * @see fields; + private final List fields; /** * Create a new output containing the single given {@link ComputedField field}. @@ -128,6 +129,7 @@ public static class WindowOutput { public WindowOutput(ComputedField outputField) { Assert.notNull(outputField, "OutputField must not be null!"); + this.fields = new ArrayList<>(); this.fields.add(outputField); } @@ -141,6 +143,7 @@ public WindowOutput(ComputedField outputField) { public WindowOutput append(ComputedField field) { Assert.notNull(field, "Field must not be null!"); + fields.add(field); return this; } @@ -202,11 +205,9 @@ interface ComputedFieldAppender { */ public static class ComputedField implements Field { - private String name; - private AggregationExpression windowOperator; - - @Nullable // - private Window window; + private final String name; + private final AggregationExpression windowOperator; + private final @Nullable Window window; /** * Create a new {@link ComputedField}. @@ -286,7 +287,7 @@ static DocumentWindow documents(Object lower, Object upper) { * @return new instance of {@link RangeWindow}. */ static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) { - return new RangeWindow(lower, upper, unit); + return new RangeWindow(lower, upper, unit == null ? WindowUnits.DEFAULT : unit); } /** @@ -314,18 +315,18 @@ static DocumentWindowBuilder documents() { public interface Window { /** - * The upper (inclusive) boundary. + * The lower (inclusive) boundary. * * @return */ - Object getUpper(); + Object getLower(); /** - * The lower (inclusive) boundary. - * + * The upper (inclusive) boundary. + * * @return */ - Object getLower(); + Object getUpper(); /** * Obtain the document representation of the window in a default {@link AggregationOperationContext context}. @@ -351,14 +352,21 @@ default Document toDocument() { */ public static class RangeWindowBuilder { - @Nullable // - private Object upper; + private @Nullable Object lower; + private @Nullable Object upper; + private @Nullable WindowUnit unit; - @Nullable // - private Object lower; + /** + * The lower (inclusive) range limit based on the sortBy field. + * + * @param lower eg. 
{@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder from(String lower) { - @Nullable // - private WindowUnit unit; + this.lower = lower; + return this; + } /** * The upper (inclusive) range limit based on the sortBy field. @@ -373,19 +381,23 @@ public RangeWindowBuilder to(String upper) { } /** - * The lower (inclusive) range limit based on the sortBy field. + * The lower (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for + * a position before the current document. Use a positive integer for a position after the current document. + * {@code 0} is the current document position. * - * @param lower eg. {@literal current} or {@literal unbounded}. + * @param lower * @return this. */ - public RangeWindowBuilder from(String lower) { + public RangeWindowBuilder from(Number lower) { this.lower = lower; return this; } /** - * The upper (inclusive) range limit value to add to the value based on the sortBy field. + * The upper (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for + * a position before the current document. Use a positive integer for a position after the current document. + * {@code 0} is the current document position. * * @param upper * @return this. @@ -396,25 +408,13 @@ public RangeWindowBuilder to(Number upper) { return this; } - /** - * The lower (inclusive) range limit value to add to the value based on the sortBy field. - * - * @param lower - * @return this. - */ - public RangeWindowBuilder from(Number lower) { - - this.lower = lower; - return this; - } - /** * Use {@literal current} as {@link #from(String) lower} limit. * * @return this. */ public RangeWindowBuilder fromCurrent() { - return from("current"); + return from(CURRENT); } /** @@ -423,7 +423,7 @@ public RangeWindowBuilder fromCurrent() { * @return this. */ public RangeWindowBuilder fromUnbounded() { - return from("unbounded"); + return from(UNBOUNDED); } /** @@ -432,7 +432,7 @@ public RangeWindowBuilder fromUnbounded() { * @return this. */ public RangeWindowBuilder toCurrent() { - return to("current"); + return to(CURRENT); } /** @@ -441,7 +441,7 @@ public RangeWindowBuilder toCurrent() { * @return this. */ public RangeWindowBuilder toUnbounded() { - return to("unbounded"); + return to(UNBOUNDED); } /** @@ -452,7 +452,8 @@ public RangeWindowBuilder toUnbounded() { */ public RangeWindowBuilder unit(WindowUnit windowUnit) { - this.unit = unit; + Assert.notNull(windowUnit, "WindowUnit must not be null"); + this.unit = windowUnit; return this; } @@ -462,6 +463,11 @@ public RangeWindowBuilder unit(WindowUnit windowUnit) { * @return new instance of {@link RangeWindow}. */ public RangeWindow build() { + + Assert.notNull(lower, "Lower bound must not be null"); + Assert.notNull(upper, "Upper bound must not be null"); + Assert.notNull(unit, "WindowUnit bound must not be null"); + return new RangeWindow(lower, upper, unit); } } @@ -473,12 +479,17 @@ public RangeWindow build() { */ public static class DocumentWindowBuilder { - @Nullable // - private Object upper; - - @Nullable // - private Object lower; + private @Nullable Object lower; + private @Nullable Object upper; + /** + * The lower (inclusive) range limit based on current document. Use a negative integer for a position before the + * current document. Use a positive integer for a position after the current document. {@code 0} is the current + * document position. + * + * @param lower + * @return this. 
+ */ public DocumentWindowBuilder from(Number lower) { this.lower = lower; @@ -486,11 +497,11 @@ public DocumentWindowBuilder from(Number lower) { } public DocumentWindowBuilder fromCurrent() { - return from("current"); + return from(CURRENT); } public DocumentWindowBuilder fromUnbounded() { - return from("unbounded"); + return from(UNBOUNDED); } public DocumentWindowBuilder to(String upper) { @@ -512,9 +523,11 @@ public DocumentWindowBuilder from(String lower) { } /** - * The upper (inclusive) range limit based on current document. + * The upper (inclusive) range limit based on current document. Use a negative integer for a position before the + * current document. Use a positive integer for a position after the current document. {@code 0} is the current + * document position. * - * @param upper eg. {@literal current} or {@literal unbounded}. + * @param upper * @return this. */ public DocumentWindowBuilder to(Number upper) { @@ -524,14 +537,18 @@ public DocumentWindowBuilder to(Number upper) { } public DocumentWindowBuilder toCurrent() { - return to("current"); + return to(CURRENT); } public DocumentWindowBuilder toUnbounded() { - return to("unbounded"); + return to(UNBOUNDED); } public DocumentWindow build() { + + Assert.notNull(lower, "Lower bound must not be null"); + Assert.notNull(upper, "Upper bound must not be null"); + return new DocumentWindow(lower, upper); } } @@ -541,24 +558,24 @@ public DocumentWindow build() { * * @author Christoph Strobl */ - abstract static class WindowImp implements Window { + static abstract class WindowImpl implements Window { - private final Object upper; private final Object lower; + private final Object upper; - protected WindowImp(Object lower, Object upper) { - this.upper = upper; + protected WindowImpl(Object lower, Object upper) { this.lower = lower; + this.upper = upper; } @Override - public Object getUpper() { - return upper; + public Object getLower() { + return lower; } @Override - public Object getLower() { - return lower; + public Object getUpper() { + return upper; } } @@ -567,7 +584,7 @@ public Object getLower() { * * @author Christoph Strobl */ - public static class DocumentWindow extends WindowImp { + public static class DocumentWindow extends WindowImpl { DocumentWindow(Object lower, Object upper) { super(lower, upper); @@ -584,10 +601,9 @@ public Document toDocument(AggregationOperationContext ctx) { * * @author Christoph Strobl */ - public static class RangeWindow extends WindowImp { + public static class RangeWindow extends WindowImpl { - @Nullable // - private WindowUnit unit; + private final WindowUnit unit; protected RangeWindow(Object lower, Object upper, WindowUnit unit) { @@ -634,10 +650,12 @@ public static class SetWindowFieldsOperationBuilder { /** * Specify the field to group by. * - * @param fieldName must not be {@literal null}. + * @param fieldName must not be {@literal null} or null. * @return this. 
*/ public SetWindowFieldsOperationBuilder partitionByField(String fieldName) { + + Assert.hasText(fieldName, "Field name must not be empty or null"); return partitionBy(Fields.field("$" + fieldName, fieldName)); } @@ -679,6 +697,8 @@ public SetWindowFieldsOperationBuilder sortBy(Sort sort) { */ public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) { + Assert.notNull(sort, "SortOperation must not be null"); + this.sortOperation = sort; return this; } @@ -691,6 +711,8 @@ public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) { */ public SetWindowFieldsOperationBuilder output(WindowOutput output) { + Assert.notNull(output, "WindowOutput must not be null"); + this.output = output; return this; } @@ -710,6 +732,8 @@ public WindowChoice output(AggregationExpression expression) { @Override public As within(Window window) { + Assert.notNull(window, "Window must not be null"); + this.window = window; return this; } @@ -717,6 +741,8 @@ public As within(Window window) { @Override public SetWindowFieldsOperationBuilder as(String targetFieldName) { + Assert.hasText(targetFieldName, "Target field name must not be empty or null"); + ComputedField computedField = new ComputedField(targetFieldName, expression, window); if (SetWindowFieldsOperationBuilder.this.output == null) { @@ -738,7 +764,7 @@ public interface As { /** * Define the target name field name to hold the computation result. * - * @param targetFieldName must not be {@literal null}. + * @param targetFieldName must not be {@literal null} or empty. * @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance. */ SetWindowFieldsOperationBuilder as(String targetFieldName); @@ -760,20 +786,22 @@ public interface WindowChoice extends As { } /** - * Partition by a value that transaltes to a valid mongodb expression. + * Partition by a value that translates to a valid mongodb expression. * * @param value must not be {@literal null}. * @return this. */ public SetWindowFieldsOperationBuilder partitionBy(Object value) { + Assert.notNull(value, "Partition By must not be null"); + partitionBy = value; return this; } /** * Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments. - * + * * @return new instance of {@link SetWindowFieldsOperation}. */ public SetWindowFieldsOperation build() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java index b88e0479a3..17bfb9b5a3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java @@ -37,6 +37,8 @@ import org.springframework.data.mongodb.test.util.Template; /** + * Integration tests for {@link SetWindowFieldsOperation}. 
+ * * @author Christoph Strobl */ @ExtendWith(MongoTemplateExtension.class) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java index 87e3f8f54c..62b0f4dffc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java @@ -33,9 +33,11 @@ import org.springframework.lang.Nullable; /** + * Unit tests for {@link SetWindowFieldsOperation}. + * * @author Christoph Strobl */ -public class SetWindowFieldsOperationUnitTests { +class SetWindowFieldsOperationUnitTests { @Test // GH-3711 void rendersTargetFieldNamesCorrectly() { From c574e5cf8a5b553399a0faf65d1ba0e1574470aa Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 11:44:15 +0200 Subject: [PATCH 036/885] Add support for `$covariancePop` and `$covarianceSamp` aggregation expressions. Closes: #3712 Original pull request: #3740. --- .../aggregation/AccumulatorOperators.java | 177 ++++++++++++++++++ .../core/aggregation/ArithmeticOperators.java | 59 ++++++ .../core/spel/MethodReferenceNode.java | 2 + .../aggregation/TestAggregationContext.java | 75 ++++++++ .../AccumulatorOperatorsUnitTests.java | 77 ++++++++ .../SpelExpressionTransformerUnitTests.java | 10 + 6 files changed, 400 insertions(+) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 6698b932f8..1ea1af9731 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -142,6 +142,63 @@ public StdDevSamp stdDevSamp() { return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? 
CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -658,4 +715,124 @@ public Document toDocument(Object value, AggregationOperationContext context) { return super.toDocument(value, context); } } + + /** + * {@link AggregationExpression} for {@code $covariancePop}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovariancePop extends AbstractAggregationExpression { + + private CovariancePop(Object value) { + super(value); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new CovariancePop(asFields(fieldReference)); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(AggregationExpression expression) { + return new CovariancePop(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(String fieldReference) { + return new CovariancePop(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(AggregationExpression expression) { + return new CovariancePop(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covariancePop"; + } + } + + /** + * {@link AggregationExpression} for {@code $covarianceSamp}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovarianceSamp extends AbstractAggregationExpression { + + private CovarianceSamp(Object value) { + super(value); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. 
+ */ + public static CovarianceSamp covarianceSampOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new CovarianceSamp(asFields(fieldReference)); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(AggregationExpression expression) { + return new CovarianceSamp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(String fieldReference) { + return new CovarianceSamp(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(AggregationExpression expression) { + return new CovarianceSamp(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covarianceSamp"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 6053f3ae1b..b27e54d298 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -19,6 +19,8 @@ import java.util.List; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Max; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Min; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; @@ -511,6 +513,63 @@ public StdDevSamp stdDevSamp() { : AccumulatorOperators.StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? 
CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + /** * Creates new {@link AggregationExpression} that rounds a number to a whole integer or to a specified decimal * place. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 5a2c48bc20..c858926446 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -170,6 +170,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("addToSet", singleArgRef().forOperator("$addToSet")); map.put("stdDevPop", arrayArgRef().forOperator("$stdDevPop")); map.put("stdDevSamp", arrayArgRef().forOperator("$stdDevSamp")); + map.put("covariancePop", arrayArgRef().forOperator("$covariancePop")); + map.put("covarianceSamp", arrayArgRef().forOperator("$covarianceSamp")); // TYPE OPERATORS map.put("type", singleArgRef().forOperator("$type")); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java new file mode 100644 index 0000000000..4f16072e43 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java @@ -0,0 +1,75 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.Field; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +public class TestAggregationContext implements AggregationOperationContext { + + private final AggregationOperationContext delegate; + + private TestAggregationContext(AggregationOperationContext delegate) { + this.delegate = delegate; + } + + public static AggregationOperationContext contextFor(@Nullable Class type) { + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return contextFor(type, mongoConverter); + } + + public static AggregationOperationContext contextFor(@Nullable Class type, MongoConverter mongoConverter) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + return new TestAggregationContext(new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference()); + } + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return delegate.getMappedObject(document, type); + } + + @Override + public FieldReference getReference(Field field) { + return delegate.getReference(field); + } + + @Override + public FieldReference getReference(String name) { + return delegate.getReference(name); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java new file mode 100644 index 0000000000..977183c448 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; + +/** + * @author Christoph Strobl + */ +class AccumulatorOperatorsUnitTests { + + @Test // GH-3712 + void rendersCovariancePopWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovariancePopWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + static class Jedi { + + String name; + + Date birthdate; + + @Field("force") + Integer midichlorianCount; + + Integer balance; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index b67beed126..c4b945ab94 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -946,6 +946,16 @@ public void shouldRenderRoundWithPlace() { assertThat(transform("round(field, 2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); } + @Test // GH-3712 + void shouldRenderCovariancePop() { + assertThat(transform("covariancePop(field1, field2)")).isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); + } + + @Test // GH-3712 + void shouldRenderCovarianceSamp() { + assertThat(transform("covarianceSamp(field1, field2)")).isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From dbfd4e5c624a9770a2ee6d3235b1e22a604583c7 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 11:44:15 +0200 Subject: [PATCH 037/885] Polishing. 
Reformat code. See #3712 Original pull request: #3740. --- .../core/aggregation/AccumulatorOperators.java | 16 ++++++++-------- .../AccumulatorOperatorsUnitTests.java | 2 ++ src/main/asciidoc/reference/mongodb.adoc | 8 ++++---- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 1ea1af9731..13913caacf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -143,8 +143,8 @@ public StdDevSamp stdDevSamp() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the population covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -155,8 +155,8 @@ public CovariancePop covariancePop(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the population covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. * * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -171,8 +171,8 @@ private CovariancePop covariancePop() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -183,8 +183,8 @@ public CovarianceSamp covarianceSamp(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. * * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. 
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java index 977183c448..6948255d15 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -27,6 +27,8 @@ import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; /** + * Unit tests for {@link AccumulatorOperators}. + * * @author Christoph Strobl */ class AccumulatorOperatorsUnitTests { diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index 84afc7ea09..3bfa500731 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1002,7 +1002,7 @@ assertThat(upserted.getFirstName()).isEqualTo("Mary"); assertThat(upserted.getAge()).isOne(); ---- -[[mongo-template.aggregation-update]] += [[mongo-template.aggregation-update]] === Aggregation Pipeline Updates Update methods exposed by `MongoOperations` and `ReactiveMongoOperations` also accept an <> via `AggregationUpdate`. @@ -2502,8 +2502,8 @@ At the time of this writing, we provide support for the following Aggregation Op | Set Aggregation Operators | `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` -| Group Aggregation Operators -| `addToSet`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` +| Group/Accumulator Aggregation Operators +| `addToSet`, `covariancePop`, `covarianceSamp`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators | `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` @@ -2544,7 +2544,7 @@ At the time of this writing, we provide support for the following Aggregation Op * The operation is mapped or added by Spring Data MongoDB. -Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. +Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB.Comparison aggregation operators are expressed as `Criteria` expressions. [[mongo.aggregation.projection]] === Projection Expressions From f3e067f59f9147b4abc314b021c8b7769c2bc127 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 09:10:35 +0200 Subject: [PATCH 038/885] Add support for `$expMovingAvg` aggregation operator. The SpEL support for this one is missing due to the differing argument map (N, alpha). Closes: #3718 Original pull request: #3744. 
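For orientation, a minimal usage sketch of the new builder, mirroring the unit tests added by this patch (the `price` field name and the `org.bson.Document`/`Aggregation` imports are assumed for illustration):

[source,java]
----
// Weight by a number of historical documents:
// renders { $expMovingAvg: { input: "$price", N: 2 } }
Document nBased = AccumulatorOperators.valueOf("price").expMovingAvg()
		.historicalDocuments(2)
		.toDocument(Aggregation.DEFAULT_CONTEXT);

// Or weight by an exponential decay value (specify either N or alpha, never both):
// renders { $expMovingAvg: { input: "$price", alpha: 0.75 } }
Document alphaBased = AccumulatorOperators.valueOf("price").expMovingAvg()
		.alpha(0.75)
		.toDocument(Aggregation.DEFAULT_CONTEXT);
----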
--- .../aggregation/AccumulatorOperators.java | 111 ++++++++++++++++++ .../AccumulatorOperatorsUnitTests.java | 24 +++- src/main/asciidoc/reference/mongodb.adoc | 2 +- 3 files changed, 131 insertions(+), 6 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 13913caacf..ba2c34edab 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -199,11 +199,61 @@ private CovarianceSamp covarianceSamp() { : CovarianceSamp.covarianceSampOf(expression); } + /** + * Creates new {@link ExpMovingAvgBuilder} that to build {@link AggregationExpression expMovingAvg} that calculates + * the exponential moving average of numeric values + * + * @return new instance of {@link ExpMovingAvg}. + * @since 3.3 + */ + public ExpMovingAvgBuilder expMovingAvg() { + + ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference) + : ExpMovingAvg.expMovingAvgOf(expression); + return new ExpMovingAvgBuilder() { + + @Override + public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) { + return expMovingAvg.n(numberOfHistoricalDocuments); + } + + @Override + public ExpMovingAvg alpha(double exponentialDecayValue) { + return expMovingAvg.alpha(exponentialDecayValue); + } + }; + } + private boolean usesFieldRef() { return fieldReference != null; } } + /** + * Builder for {@link ExpMovingAvg}. + * + * @since 3.3 + */ + public interface ExpMovingAvgBuilder { + + /** + * Define the number of historical documents with significant mathematical weight. + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments); + + /** + * Define the exponential decay value. + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg alpha(double exponentialDecayValue); + + } + /** * {@link AggregationExpression} for {@code $sum}. * @@ -835,4 +885,65 @@ protected String getMongoMethod() { return "$covarianceSamp"; } } + + /** + * {@link ExpMovingAvg} calculates the exponential moving average of numeric values. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class ExpMovingAvg extends AbstractAggregationExpression { + + private ExpMovingAvg(Object value) { + super(value); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(String fieldReference) { + return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value + * to be used as input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) { + return new ExpMovingAvg(Collections.singletonMap("input", expression)); + } + + /** + * Define the number of historical documents with significant mathematical weight.
+		 * Specify either {@link #n(int) N} or {@link #alpha(double) alpha}. Not both!
+		 *
+		 * @param numberOfHistoricalDocuments
+		 * @return new instance of {@link ExpMovingAvg}.
+		 */
+		public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) {
+			return new ExpMovingAvg(append("N", numberOfHistoricalDocuments));
+		}
+
+		/**
+		 * Define the exponential decay value.&#13;
+ * Specify either {@link #alpha(double) aplha} or {@link #n(int) N}. Not both! + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + public ExpMovingAvg alpha(double exponentialDecayValue) { + return new ExpMovingAvg(append("alpha", exponentialDecayValue)); + } + + @Override + protected String getMongoMethod() { + return "$expMovingAvg"; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java index 6948255d15..27bd876255 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.*; import java.util.Arrays; import java.util.Date; @@ -46,7 +47,7 @@ void rendersCovariancePopWithExpression() { assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covariancePop("midichlorianCount") .toDocument(TestAggregationContext.contextFor(Jedi.class))) - .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); } @Test // GH-3712 @@ -54,7 +55,7 @@ void rendersCovarianceSampWithFieldReference() { assertThat(AccumulatorOperators.valueOf("balance").covarianceSamp("midichlorianCount") .toDocument(TestAggregationContext.contextFor(Jedi.class))) - .isEqualTo(new Document("$covarianceSamp", Arrays.asList("$balance", "$force"))); + .isEqualTo(new Document("$covarianceSamp", Arrays.asList("$balance", "$force"))); } @Test // GH-3712 @@ -62,7 +63,21 @@ void rendersCovarianceSampWithExpression() { assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covarianceSamp("midichlorianCount") .toDocument(TestAggregationContext.contextFor(Jedi.class))) - .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithNumberOfHistoricDocuments() { + + assertThat(valueOf("price").expMovingAvg().historicalDocuments(2).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", N: 2 } }")); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithAlpha() { + + assertThat(valueOf("price").expMovingAvg().alpha(0.75).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", alpha: 0.75 } }")); } static class Jedi { @@ -71,8 +86,7 @@ static class Jedi { Date birthdate; - @Field("force") - Integer midichlorianCount; + @Field("force") Integer midichlorianCount; Integer balance; } diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index 3bfa500731..dfa87dd3c7 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -2503,7 +2503,7 @@ At the time of this writing, we provide support for the following Aggregation Op | 
`setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` | Group/Accumulator Aggregation Operators -| `addToSet`, `covariancePop`, `covarianceSamp`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` +| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators | `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` From a977b8a790c0d71ad65e8a9bdab3ad6ca2005257 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 23 Aug 2021 13:26:57 +0200 Subject: [PATCH 039/885] Change visibility of Reactive/MongoRepositoryFactoryBean setters. Setters of the FactoryBean should be public. Closes: #3779 Original pull request: #3780. --- .../mongodb/repository/support/MongoRepositoryFactoryBean.java | 2 +- .../repository/support/ReactiveMongoRepositoryFactoryBean.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java index ade85d3110..8f156bdeea 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java @@ -70,7 +70,7 @@ public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) */ @Override - protected void setMappingContext(MappingContext mappingContext) { + public void setMappingContext(MappingContext mappingContext) { super.setMappingContext(mappingContext); this.mappingContextConfigured = true; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java index 4e8232714f..6536983a70 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java @@ -80,7 +80,7 @@ public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) */ @Override - protected void setMappingContext(MappingContext mappingContext) { + public void setMappingContext(MappingContext mappingContext) { super.setMappingContext(mappingContext); this.mappingContextConfigured = true; From 30da62181f92ee1e6983eccc4d017941e97ebbb8 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 13:39:05 +0200 Subject: [PATCH 040/885] Add support for `$rank` and `$denseRank` aggregation operators. Closes: #3715 Original pull request: #3741. 
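As a hedged usage sketch (field names borrowed from the `CakeSale` sample earlier in this series; the surrounding window-field setup is illustrative and not part of this commit), the new operators are meant to be used as output expressions of a `$setWindowFields` stage:

[source,java]
----
// DocumentOperators.rank() renders { $rank: {} }; denseRank() renders { $denseRank: {} }.
// Assumed example: rank cake sales per state by descending quantity.
SetWindowFieldsOperation ranked = SetWindowFieldsOperation.builder()
		.partitionByField("state")
		.sortBy(Sort.by(Direction.DESC, "qty"))
		.output(DocumentOperators.rank())
		.as("rankByQuantity")
		.build();
----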
--- .../core/aggregation/DocumentOperators.java | 76 +++++++++++++++++++ .../SpelExpressionTransformer.java | 5 +- .../core/spel/MethodReferenceNode.java | 16 +++- .../DocumentOperatorsUnitTests.java | 39 ++++++++++ .../SpelExpressionTransformerUnitTests.java | 10 +++ 5 files changed, 144 insertions(+), 2 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java new file mode 100644 index 0000000000..8ba7acc5b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -0,0 +1,76 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; + +/** + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentOperators { + + /** + * Obtain the document position (including gaps) relative to others (rank). + * + * @return new instance of {@link Rank}. + * @since 3.3 + */ + public static Rank rank() { + return new Rank(); + } + + /** + * Obtain the document position (without gaps) relative to others (rank). + * + * @return new instance of {@link DenseRank}. + * @since 3.3 + */ + public static DenseRank denseRank() { + return new DenseRank(); + } + + /** + * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents + * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Rank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rank", new Document()); + } + } + + /** + * {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple + * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next rank without + * any gaps. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DenseRank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$denseRank", new Document()); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java index 33b5c72c78..e00740945b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java @@ -500,7 +500,10 @@ protected Object convert(AggregationExpressionTransformationContext argList = new ArrayList(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index c858926446..763ae830e5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -68,6 +68,10 @@ public class MethodReferenceNode extends ExpressionNode { map.put("lte", arrayArgRef().forOperator("$lte")); map.put("ne", arrayArgRef().forOperator("$ne")); + // DOCUMENT OPERATORS + map.put("rank", emptyRef().forOperator("$rank")); + map.put("denseRank", emptyRef().forOperator("$denseRank")); + // ARITHMETIC OPERATORS map.put("abs", singleArgRef().forOperator("$abs")); map.put("add", arrayArgRef().forOperator("$add")); @@ -307,6 +311,16 @@ static AggregationMethodReference mapArgRef() { return new AggregationMethodReference(null, ArgumentType.MAP, null); } + /** + * Create a new {@link AggregationMethodReference} for a {@link ArgumentType#EMPTY_DOCUMENT} argument. + * + * @return never {@literal null}. + * @since 3.3 + */ + static AggregationMethodReference emptyRef() { + return new AggregationMethodReference(null, ArgumentType.EMPTY_DOCUMENT, null); + } + /** * Create a new {@link AggregationMethodReference} for a given {@literal aggregationExpressionOperator} reusing * previously set arguments. @@ -342,7 +356,7 @@ AggregationMethodReference mappingParametersTo(String... aggregationExpressionPr * @since 1.10 */ public enum ArgumentType { - SINGLE, ARRAY, MAP + SINGLE, ARRAY, MAP, EMPTY_DOCUMENT } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java new file mode 100644 index 0000000000..27ac1beccd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -0,0 +1,39 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.DocumentOperators.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * @author Christoph Strobl + */ +class DocumentOperatorsUnitTests { + + @Test // GH-3715 + void rendersRank() { + assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rank", new Document())); + } + + @Test // GH-3715 + void rendersDenseRank() { + assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$denseRank", new Document())); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index c4b945ab94..06659820d8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -956,6 +956,16 @@ void shouldRenderCovarianceSamp() { assertThat(transform("covarianceSamp(field1, field2)")).isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); } + @Test // GH-3715 + void shouldRenderRank() { + assertThat(transform("rank()")).isEqualTo(Document.parse("{ $rank : {} }")); + } + + @Test // GH-3715 + void shouldRenderDenseRank() { + assertThat(transform("denseRank()")).isEqualTo(Document.parse("{ $denseRank : {} }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 1a86761e2e2002ad4f0aee50de6ebabaa931c84c Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 14:43:48 +0200 Subject: [PATCH 041/885] Add support for `$documentNumber` aggregation operator. Closes: #3717 Original pull request: #3741. 
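As a quick sketch of how the window operators introduced by these patches are expected to render, the snippet below uses only the API visible in the diffs (the AggregationExpression#toDocument contract, Aggregation.DEFAULT_CONTEXT and the static DocumentOperators factory methods); the class name and main method are purely illustrative and not part of the patches.

    // Minimal sketch: render the new document operators to their MongoDB representation.
    // Assumes only rank(), denseRank() and documentNumber() as added by PATCH 040/041.
    import org.bson.Document;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;
    import static org.springframework.data.mongodb.core.aggregation.DocumentOperators.*;

    class DocumentOperatorsSketch {

        public static void main(String[] args) {

            // { "$rank" : { } } - document position relative to others, including gaps
            Document rankDoc = rank().toDocument(Aggregation.DEFAULT_CONTEXT);

            // { "$denseRank" : { } } - document position relative to others, without gaps
            Document denseRankDoc = denseRank().toDocument(Aggregation.DEFAULT_CONTEXT);

            // { "$documentNumber" : { } } - plain position of the current document
            Document documentNumberDoc = documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT);

            System.out.println(rankDoc.toJson());
            System.out.println(denseRankDoc.toJson());
            System.out.println(documentNumberDoc.toJson());
        }
    }

The same operators are also reachable from SpEL aggregation expressions as rank(), denseRank() and documentNumber(), as exercised by the SpelExpressionTransformerUnitTests changes in these patches.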
--- .../core/aggregation/DocumentOperators.java | 26 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../DocumentOperatorsUnitTests.java | 5 ++++ .../SpelExpressionTransformerUnitTests.java | 5 ++++ 4 files changed, 37 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java index 8ba7acc5b8..7a5918e047 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -18,6 +18,8 @@ import org.bson.Document; /** + * Gateway to {@literal document expressions} such as {@literal $rank, $documentNumber, etc.} + * * @author Christoph Strobl * @since 3.3 */ @@ -43,6 +45,16 @@ public static DenseRank denseRank() { return new DenseRank(); } + /** + * Obtain the current document position. + * + * @return new instance of {@link DocumentNumber}. + * @since 3.3 + */ + public static DocumentNumber documentNumber() { + return new DocumentNumber(); + } + /** * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. @@ -73,4 +85,18 @@ public Document toDocument(AggregationOperationContext context) { return new Document("$denseRank", new Document()); } } + + /** + * {@link DocumentNumber} resolves the current document position. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DocumentNumber implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$documentNumber", new Document()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 763ae830e5..a184c49be8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -71,6 +71,7 @@ public class MethodReferenceNode extends ExpressionNode { // DOCUMENT OPERATORS map.put("rank", emptyRef().forOperator("$rank")); map.put("denseRank", emptyRef().forOperator("$denseRank")); + map.put("documentNumber", emptyRef().forOperator("$documentNumber")); // ARITHMETIC OPERATORS map.put("abs", singleArgRef().forOperator("$abs")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java index 27ac1beccd..c6604bc543 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -36,4 +36,9 @@ void rendersDenseRank() { assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(new Document("$denseRank", new Document())); } + + @Test // GH-3717 + void rendersDocumentNumber() { + 
assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$documentNumber", new Document())); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 06659820d8..3c20b58d58 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -966,6 +966,11 @@ void shouldRenderDenseRank() { assertThat(transform("denseRank()")).isEqualTo(Document.parse("{ $denseRank : {} }")); } + @Test // GH-3717 + void shouldRenderDocumentNumber() { + assertThat(transform("documentNumber()")).isEqualTo(Document.parse("{ $documentNumber : {} }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 510028a834ff585e8805774a717bf94c56d009d8 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 11:33:07 +0200 Subject: [PATCH 042/885] Add support for `$shift` aggregation Operator. Closes: #3727 Original pull request: #3741. --- .../core/aggregation/DocumentOperators.java | 124 +++++++++++++++++- .../core/spel/MethodReferenceNode.java | 1 + .../DocumentOperatorsUnitTests.java | 17 ++- .../SpelExpressionTransformerUnitTests.java | 14 ++ 4 files changed, 153 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java index 7a5918e047..76fa591e45 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Collections; + import org.bson.Document; /** @@ -45,6 +47,26 @@ public static DenseRank denseRank() { return new DenseRank(); } + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(String fieldReference) { + return new DocumentOperatorsFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(AggregationExpression expression) { + return new DocumentOperatorsFactory(expression); + } + /** * Obtain the current document position. 
* @@ -55,6 +77,35 @@ public static DocumentNumber documentNumber() { return new DocumentNumber(); } + /** + * @author Christoph Strobl + */ + public static class DocumentOperatorsFactory { + + private Object target; + + public DocumentOperatorsFactory(Object target) { + this.target = target; + } + + /** + * Creates new {@link AggregationExpression} that applies the expression to a document at specified position + * relative to the current document. + * + * @param by the value to add to the current position. + * @return new instance of {@link Shift}. + */ + public Shift shift(int by) { + + Shift shift = usesExpression() ? Shift.shift((AggregationExpression) target) : Shift.shift(target.toString()); + return shift.by(by); + } + + private boolean usesExpression() { + return target instanceof AggregationExpression; + } + } + /** * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. @@ -72,8 +123,8 @@ public Document toDocument(AggregationOperationContext context) { /** * {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple - * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next rank without - * any gaps. + * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next + * rank without any gaps. * * @author Christoph Strobl * @since 3.3 @@ -99,4 +150,73 @@ public Document toDocument(AggregationOperationContext context) { return new Document("$documentNumber", new Document()); } } + + /** + * Shift applies an expression to a document in a specified position relative to the current document. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Shift extends AbstractAggregationExpression { + + private Shift(Object value) { + super(value); + } + + /** + * Specifies the field to evaluate and return. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(String fieldReference) { + return new Shift(Collections.singletonMap("output", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression expression} to evaluate and return. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(AggregationExpression expression) { + return new Shift(Collections.singletonMap("output", expression)); + } + + /** + * Shift the document position relative to the current. Use a positive value for follow up documents (eg. 1 for the + * next) or a negative value for the predecessor documents (eg. -1 for the previous). + * + * @param shiftBy value to add to the current position. + * @return new instance of {@link Shift}. + */ + public Shift by(int shiftBy) { + return new Shift(append("by", shiftBy)); + } + + /** + * Define the default value if the target document is out of range. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public Shift defaultTo(Object value) { + return new Shift(append("default", value)); + } + + /** + * Define the {@link AggregationExpression expression} to evaluate if the target document is out of range. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. 
+ */ + public Shift defaultToValueOf(AggregationExpression expression) { + return defaultTo(expression); + } + + @Override + protected String getMongoMethod() { + return "$shift"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index a184c49be8..4052b2cbaa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -72,6 +72,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("rank", emptyRef().forOperator("$rank")); map.put("denseRank", emptyRef().forOperator("$denseRank")); map.put("documentNumber", emptyRef().forOperator("$documentNumber")); + map.put("shift", mapArgRef().forOperator("$shift").mappingParametersTo("output", "by", "default")); // ARITHMETIC OPERATORS map.put("abs", singleArgRef().forOperator("$abs")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java index c6604bc543..4a29db60df 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -39,6 +39,21 @@ void rendersDenseRank() { @Test // GH-3717 void rendersDocumentNumber() { - assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$documentNumber", new Document())); + assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$documentNumber", new Document())); + } + + @Test // GH-3727 + void rendersShift() { + + assertThat(valueOf("quantity").shift(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(valueOf("quantity").shift(1).defaultTo("Not available").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 3c20b58d58..334825a829 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -971,6 +971,20 @@ void shouldRenderDocumentNumber() { assertThat(transform("documentNumber()")).isEqualTo(Document.parse("{ $documentNumber : {} }")); } + @Test // GH-3727 + void rendersShift() { + + assertThat(transform("shift(quantity, 1)")) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(transform("shift(quantity, 1, 'Not available')")) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, 
default: \"Not available\" } }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From c8a791d36769e9fdd09568def1cce8c2bd524b70 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 10:59:06 +0200 Subject: [PATCH 043/885] Polishing. Make fields final where possible. Update javadoc. Simplify assertions. Update reference docs. See: #3715, See #3717, See #3727 Original pull request: #3741. --- .../core/aggregation/DocumentOperators.java | 2 +- .../SpelExpressionTransformer.java | 2 +- .../core/spel/ExpressionTransformer.java | 2 +- .../DocumentOperatorsUnitTests.java | 15 +- .../SpelExpressionTransformerUnitTests.java | 546 +++++++++--------- src/main/asciidoc/reference/mongodb.adoc | 7 +- 6 files changed, 291 insertions(+), 283 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java index 76fa591e45..40e0065a66 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -82,7 +82,7 @@ public static DocumentNumber documentNumber() { */ public static class DocumentOperatorsFactory { - private Object target; + private final Object target; public DocumentOperatorsFactory(Object target) { this.target = target; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java index e00740945b..f47e062238 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java @@ -102,7 +102,7 @@ public Object transform(String expression, AggregationOperationContext context, ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG); ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state); - return transform(new AggregationExpressionTransformationContext(node, null, null, context)); + return transform(new AggregationExpressionTransformationContext<>(node, null, null, context)); } /* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java index d35ed2800a..3f3b405cc6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core.spel; /** - * SPI interface to implement components that can transfrom an {@link ExpressionTransformationContextSupport} into an + * SPI interface to implement components that can transform an {@link ExpressionTransformationContextSupport} into an * object. 
* * @author Oliver Gierke diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java index 4a29db60df..5cd0d4271e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -15,45 +15,46 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.DocumentOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.bson.Document; import org.junit.jupiter.api.Test; /** + * Unit tests for {@link DocumentOperators}. + * * @author Christoph Strobl */ class DocumentOperatorsUnitTests { @Test // GH-3715 void rendersRank() { - assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rank", new Document())); + assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $rank: { } }"); } @Test // GH-3715 void rendersDenseRank() { assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(new Document("$denseRank", new Document())); + .isEqualTo("{ $denseRank: { } }"); } @Test // GH-3717 void rendersDocumentNumber() { assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(new Document("$documentNumber", new Document())); + .isEqualTo("{ $documentNumber: { } }"); } @Test // GH-3727 void rendersShift() { assertThat(valueOf("quantity").shift(1).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); } @Test // GH-3727 void rendersShiftWithDefault() { assertThat(valueOf("quantity").shift(1).defaultTo("Not available").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 334825a829..ee55818018 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -24,6 +24,7 @@ import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.Person; +import org.springframework.lang.Nullable; /** * Unit tests for {@link SpelExpressionTransformer}. 
@@ -34,12 +35,12 @@ */ public class SpelExpressionTransformerUnitTests { - SpelExpressionTransformer transformer = new SpelExpressionTransformer(); + private SpelExpressionTransformer transformer = new SpelExpressionTransformer(); - Data data; + private Data data; @BeforeEach - public void beforeEach() { + void beforeEach() { this.data = new Data(); this.data.primitiveLongValue = 42; @@ -50,118 +51,118 @@ public void beforeEach() { } @Test // DATAMONGO-774 - public void shouldRenderConstantExpression() { + void shouldRenderConstantExpression() { - assertThat(transform("1")).isEqualTo((Object) "1"); - assertThat(transform("-1")).isEqualTo((Object) "-1"); - assertThat(transform("1.0")).isEqualTo((Object) "1.0"); - assertThat(transform("-1.0")).isEqualTo((Object) "-1.0"); + assertThat(transform("1")).isEqualTo("1"); + assertThat(transform("-1")).isEqualTo("-1"); + assertThat(transform("1.0")).isEqualTo("1.0"); + assertThat(transform("-1.0")).isEqualTo("-1.0"); assertThat(transform("null")).isNull(); } @Test // DATAMONGO-774 - public void shouldSupportKnownOperands() { + void shouldSupportKnownOperands() { - assertThat(transform("a + b")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a - b")).isEqualTo((Object) Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a * b")).isEqualTo((Object) Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a / b")).isEqualTo((Object) Document.parse("{ \"$divide\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a % b")).isEqualTo((Object) Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a + b")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a - b")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a * b")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a / b")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a % b")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-774 - public void shouldThrowExceptionOnUnknownOperand() { + void shouldThrowExceptionOnUnknownOperand() { assertThatIllegalArgumentException().isThrownBy(() -> transform("a++")); } @Test // DATAMONGO-774 - public void shouldRenderSumExpression() { - assertThat(transform("a + 1")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a\" , 1]}")); + void shouldRenderSumExpression() { + assertThat(transform("a + 1")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 1]}")); } @Test // DATAMONGO-774 - public void shouldRenderFormula() { + void shouldRenderFormula() { - assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo((Object) Document.parse( + assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo(Document.parse( "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); } @Test // DATAMONGO-774 - public void shouldRenderFormulaInCurlyBrackets() { + void shouldRenderFormulaInCurlyBrackets() { - assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo((Object) Document.parse( + assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo(Document.parse( "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); } @Test // DATAMONGO-774 - public void shouldRenderFieldReference() { + void 
shouldRenderFieldReference() { - assertThat(transform("foo")).isEqualTo((Object) "$foo"); - assertThat(transform("$foo")).isEqualTo((Object) "$foo"); + assertThat(transform("foo")).isEqualTo("$foo"); + assertThat(transform("$foo")).isEqualTo("$foo"); } @Test // DATAMONGO-774 - public void shouldRenderNestedFieldReference() { + void shouldRenderNestedFieldReference() { - assertThat(transform("foo.bar")).isEqualTo((Object) "$foo.bar"); - assertThat(transform("$foo.bar")).isEqualTo((Object) "$foo.bar"); + assertThat(transform("foo.bar")).isEqualTo("$foo.bar"); + assertThat(transform("$foo.bar")).isEqualTo("$foo.bar"); } @Test // DATAMONGO-774 @Disabled - public void shouldRenderNestedIndexedFieldReference() { + void shouldRenderNestedIndexedFieldReference() { // TODO add support for rendering nested indexed field references - assertThat(transform("foo[3].bar")).isEqualTo((Object) "$foo[3].bar"); + assertThat(transform("foo[3].bar")).isEqualTo("$foo[3].bar"); } @Test // DATAMONGO-774 - public void shouldRenderConsecutiveOperation() { - assertThat(transform("1 + 1 + 1")).isEqualTo((Object) Document.parse("{ \"$add\" : [ 1 , 1 , 1]}")); + void shouldRenderConsecutiveOperation() { + assertThat(transform("1 + 1 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , 1]}")); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression0() { + void shouldRenderComplexExpression0() { assertThat(transform("-(1 + q)")) - .isEqualTo((Object) Document.parse("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}")); + .isEqualTo(Document.parse("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression1() { + void shouldRenderComplexExpression1() { - assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo((Object) Document.parse( + assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo(Document.parse( "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression2() { + void shouldRenderComplexExpression2() { - assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo((Object) Document.parse( + assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo(Document.parse( "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { + void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { - assertThat(transform("-4 + 1")).isEqualTo((Object) Document.parse("{ \"$add\" : [ -4 , 1]}")); - assertThat(transform("1 + -4")).isEqualTo((Object) Document.parse("{ \"$add\" : [ 1 , -4]}")); + assertThat(transform("-4 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ -4 , 1]}")); + assertThat(transform("1 + -4")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , -4]}")); } @Test // DATAMONGO-774 - public void shouldRenderConsecutiveOperationsInComplexExpression() { + void shouldRenderConsecutiveOperationsInComplexExpression() { assertThat(transform("1 + 1 + (1 + 1 + 1) / q")).isEqualTo( - (Object) Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); + Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderParameterExpressionResults() { - assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo((Object) 
Document.parse("{ \"$add\" : [ 1 , 2 , 3]}")); + void shouldRenderParameterExpressionResults() { + assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 2 , 3]}")); } @Test // DATAMONGO-774 - public void shouldRenderNestedParameterExpressionResults() { + void shouldRenderNestedParameterExpressionResults() { assertThat( ((Document) transform("[0].primitiveLongValue + [0].primitiveDoubleValue + [0].doubleValue.longValue()", data)) @@ -171,7 +172,7 @@ public void shouldRenderNestedParameterExpressionResults() { } @Test // DATAMONGO-774 - public void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { + void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { Document target = ((Document) transform( "((1 + [0].primitiveLongValue) + [0].primitiveDoubleValue) * [0].doubleValue.longValue()", data)); @@ -184,765 +185,767 @@ public void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { } @Test // DATAMONGO-840 - public void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { + void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); assertThat(transform("[0].age + a.c", person)) - .isEqualTo((Object) Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); + .isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); } @Test // DATAMONGO-840 - public void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { + void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { - assertThat(transform("a.b + a.c")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}")); + assertThat(transform("a.b + a.c")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeAnd() { - assertThat(transform("and(a, b)")).isEqualTo((Object) Document.parse("{ \"$and\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeAnd() { + assertThat(transform("and(a, b)")).isEqualTo(Document.parse("{ \"$and\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeOr() { - assertThat(transform("or(a, b)")).isEqualTo((Object) Document.parse("{ \"$or\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeOr() { + assertThat(transform("or(a, b)")).isEqualTo(Document.parse("{ \"$or\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeNot() { - assertThat(transform("not(a)")).isEqualTo((Object) Document.parse("{ \"$not\" : [ \"$a\"]}")); + void shouldRenderMethodReferenceNodeNot() { + assertThat(transform("not(a)")).isEqualTo(Document.parse("{ \"$not\" : [ \"$a\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEquals() { + void shouldRenderMethodReferenceNodeSetEquals() { assertThat(transform("setEquals(a, b)")) - .isEqualTo((Object) Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); + .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEqualsForArrays() { + void shouldRenderMethodReferenceNodeSetEqualsForArrays() { assertThat(transform("setEquals(new int[]{1,2,3}, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void 
shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { + void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { assertThat(transform("setEquals(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetIntersection() { + void shouldRenderMethodReferenceSetIntersection() { assertThat(transform("setIntersection(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetUnion() { + void shouldRenderMethodReferenceSetUnion() { assertThat(transform("setUnion(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSeDifference() { + void shouldRenderMethodReferenceSeDifference() { assertThat(transform("setDifference(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetIsSubset() { + void shouldRenderMethodReferenceSetIsSubset() { assertThat(transform("setIsSubset(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAnyElementTrue() { - assertThat(transform("anyElementTrue(a)")).isEqualTo((Object) Document.parse("{ \"$anyElementTrue\" : [ \"$a\"]}")); + void shouldRenderMethodReferenceAnyElementTrue() { + assertThat(transform("anyElementTrue(a)")).isEqualTo(Document.parse("{ \"$anyElementTrue\" : [ \"$a\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAllElementsTrue() { + void shouldRenderMethodReferenceAllElementsTrue() { assertThat(transform("allElementsTrue(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCmp() { - assertThat(transform("cmp(a, 250)")).isEqualTo((Object) Document.parse("{ \"$cmp\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceCmp() { + assertThat(transform("cmp(a, 250)")).isEqualTo(Document.parse("{ \"$cmp\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceEq() { - assertThat(transform("eq(a, 250)")).isEqualTo((Object) Document.parse("{ \"$eq\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceEq() { + assertThat(transform("eq(a, 250)")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceGt() { - assertThat(transform("gt(a, 250)")).isEqualTo((Object) Document.parse("{ \"$gt\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceGt() { + assertThat(transform("gt(a, 250)")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void 
shouldRenderMethodReferenceGte() { - assertThat(transform("gte(a, 250)")).isEqualTo((Object) Document.parse("{ \"$gte\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceGte() { + assertThat(transform("gte(a, 250)")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLt() { - assertThat(transform("lt(a, 250)")).isEqualTo((Object) Document.parse("{ \"$lt\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceLt() { + assertThat(transform("lt(a, 250)")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLte() { - assertThat(transform("lte(a, 250)")).isEqualTo((Object) Document.parse("{ \"$lte\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceLte() { + assertThat(transform("lte(a, 250)")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNe() { - assertThat(transform("ne(a, 250)")).isEqualTo((Object) Document.parse("{ \"$ne\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceNe() { + assertThat(transform("ne(a, 250)")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAbs() { - assertThat(transform("abs(1)")).isEqualTo((Object) Document.parse("{ \"$abs\" : 1}")); + void shouldRenderMethodReferenceAbs() { + assertThat(transform("abs(1)")).isEqualTo(Document.parse("{ \"$abs\" : 1}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAdd() { - assertThat(transform("add(a, 250)")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceAdd() { + assertThat(transform("add(a, 250)")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCeil() { - assertThat(transform("ceil(7.8)")).isEqualTo((Object) Document.parse("{ \"$ceil\" : 7.8}")); + void shouldRenderMethodReferenceCeil() { + assertThat(transform("ceil(7.8)")).isEqualTo(Document.parse("{ \"$ceil\" : 7.8}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDivide() { - assertThat(transform("divide(a, 250)")).isEqualTo((Object) Document.parse("{ \"$divide\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceDivide() { + assertThat(transform("divide(a, 250)")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceExp() { - assertThat(transform("exp(2)")).isEqualTo((Object) Document.parse("{ \"$exp\" : 2}")); + void shouldRenderMethodReferenceExp() { + assertThat(transform("exp(2)")).isEqualTo(Document.parse("{ \"$exp\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceFloor() { - assertThat(transform("floor(2)")).isEqualTo((Object) Document.parse("{ \"$floor\" : 2}")); + void shouldRenderMethodReferenceFloor() { + assertThat(transform("floor(2)")).isEqualTo(Document.parse("{ \"$floor\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLn() { - assertThat(transform("ln(2)")).isEqualTo((Object) Document.parse("{ \"$ln\" : 2}")); + void shouldRenderMethodReferenceLn() { + assertThat(transform("ln(2)")).isEqualTo(Document.parse("{ \"$ln\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLog() { - assertThat(transform("log(100, 10)")).isEqualTo((Object) Document.parse("{ \"$log\" : [ 100 , 10]}")); + void 
shouldRenderMethodReferenceLog() { + assertThat(transform("log(100, 10)")).isEqualTo(Document.parse("{ \"$log\" : [ 100 , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLog10() { - assertThat(transform("log10(100)")).isEqualTo((Object) Document.parse("{ \"$log10\" : 100}")); + void shouldRenderMethodReferenceLog10() { + assertThat(transform("log10(100)")).isEqualTo(Document.parse("{ \"$log10\" : 100}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMod() { - assertThat(transform("mod(a, b)")).isEqualTo((Object) Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMod() { + assertThat(transform("mod(a, b)")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMultiply() { - assertThat(transform("multiply(a, b)")).isEqualTo((Object) Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMultiply() { + assertThat(transform("multiply(a, b)")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodePow() { - assertThat(transform("pow(a, 2)")).isEqualTo((Object) Document.parse("{ \"$pow\" : [ \"$a\" , 2]}")); + void shouldRenderMethodReferenceNodePow() { + assertThat(transform("pow(a, 2)")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$a\" , 2]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSqrt() { - assertThat(transform("sqrt(2)")).isEqualTo((Object) Document.parse("{ \"$sqrt\" : 2}")); + void shouldRenderMethodReferenceSqrt() { + assertThat(transform("sqrt(2)")).isEqualTo(Document.parse("{ \"$sqrt\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSubtract() { - assertThat(transform("subtract(a, b)")).isEqualTo((Object) Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeSubtract() { + assertThat(transform("subtract(a, b)")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceTrunc() { - assertThat(transform("trunc(2.1)")).isEqualTo((Object) Document.parse("{ \"$trunc\" : 2.1}")); + void shouldRenderMethodReferenceTrunc() { + assertThat(transform("trunc(2.1)")).isEqualTo(Document.parse("{ \"$trunc\" : 2.1}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeConcat() { + void shouldRenderMethodReferenceNodeConcat() { assertThat(transform("concat(a, b, 'c')")) - .isEqualTo((Object) Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); + .isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSubstrc() { - assertThat(transform("substr(a, 0, 1)")).isEqualTo((Object) Document.parse("{ \"$substr\" : [ \"$a\" , 0 , 1]}")); + void shouldRenderMethodReferenceNodeSubstrc() { + assertThat(transform("substr(a, 0, 1)")).isEqualTo(Document.parse("{ \"$substr\" : [ \"$a\" , 0 , 1]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceToLower() { - assertThat(transform("toLower(a)")).isEqualTo((Object) Document.parse("{ \"$toLower\" : \"$a\"}")); + void shouldRenderMethodReferenceToLower() { + assertThat(transform("toLower(a)")).isEqualTo(Document.parse("{ \"$toLower\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceToUpper() { - 
assertThat(transform("toUpper(a)")).isEqualTo((Object) Document.parse("{ \"$toUpper\" : \"$a\"}")); + void shouldRenderMethodReferenceToUpper() { + assertThat(transform("toUpper(a)")).isEqualTo(Document.parse("{ \"$toUpper\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStrCaseCmp() { + void shouldRenderMethodReferenceNodeStrCaseCmp() { assertThat(transform("strcasecmp(a, b)")) - .isEqualTo((Object) Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); + .isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMeta() { - assertThat(transform("meta('textScore')")).isEqualTo((Object) Document.parse("{ \"$meta\" : \"textScore\"}")); + void shouldRenderMethodReferenceMeta() { + assertThat(transform("meta('textScore')")).isEqualTo(Document.parse("{ \"$meta\" : \"textScore\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeArrayElemAt() { + void shouldRenderMethodReferenceNodeArrayElemAt() { assertThat(transform("arrayElemAt(a, 10)")) - .isEqualTo((Object) Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); + .isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeConcatArrays() { + void shouldRenderMethodReferenceNodeConcatArrays() { assertThat(transform("concatArrays(a, b, c)")) - .isEqualTo((Object) Document.parse("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}")); + .isEqualTo(Document.parse("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeFilter() { - assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo((Object) Document.parse( + void shouldRenderMethodReferenceNodeFilter() { + assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo(Document.parse( "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceIsArray() { - assertThat(transform("isArray(a)")).isEqualTo((Object) Document.parse("{ \"$isArray\" : \"$a\"}")); + void shouldRenderMethodReferenceIsArray() { + assertThat(transform("isArray(a)")).isEqualTo(Document.parse("{ \"$isArray\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceIsSize() { - assertThat(transform("size(a)")).isEqualTo((Object) Document.parse("{ \"$size\" : \"$a\"}")); + void shouldRenderMethodReferenceIsSize() { + assertThat(transform("size(a)")).isEqualTo(Document.parse("{ \"$size\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSlice() { - assertThat(transform("slice(a, 10)")).isEqualTo((Object) Document.parse("{ \"$slice\" : [ \"$a\" , 10]}")); + void shouldRenderMethodReferenceNodeSlice() { + assertThat(transform("slice(a, 10)")).isEqualTo(Document.parse("{ \"$slice\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMap() { - assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo((Object) Document.parse( + void shouldRenderMethodReferenceNodeMap() { + assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo(Document.parse( "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeLet() { - assertThat(transform("let({low:1, high:'$$low'}, 
gt('$$low', '$$high'))")).isEqualTo((Object) Document.parse( + void shouldRenderMethodReferenceNodeLet() { + assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo(Document.parse( "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLiteral() { - assertThat(transform("literal($1)")).isEqualTo((Object) Document.parse("{ \"$literal\" : \"$1\"}")); + void shouldRenderMethodReferenceLiteral() { + assertThat(transform("literal($1)")).isEqualTo(Document.parse("{ \"$literal\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfYear() { - assertThat(transform("dayOfYear($1)")).isEqualTo((Object) Document.parse("{ \"$dayOfYear\" : \"$1\"}")); + void shouldRenderMethodReferenceDayOfYear() { + assertThat(transform("dayOfYear($1)")).isEqualTo(Document.parse("{ \"$dayOfYear\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfMonth() { - assertThat(transform("dayOfMonth($1)")).isEqualTo((Object) Document.parse("{ \"$dayOfMonth\" : \"$1\"}")); + void shouldRenderMethodReferenceDayOfMonth() { + assertThat(transform("dayOfMonth($1)")).isEqualTo(Document.parse("{ \"$dayOfMonth\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfWeek() { - assertThat(transform("dayOfWeek($1)")).isEqualTo((Object) Document.parse("{ \"$dayOfWeek\" : \"$1\"}")); + void shouldRenderMethodReferenceDayOfWeek() { + assertThat(transform("dayOfWeek($1)")).isEqualTo(Document.parse("{ \"$dayOfWeek\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceYear() { - assertThat(transform("year($1)")).isEqualTo((Object) Document.parse("{ \"$year\" : \"$1\"}")); + void shouldRenderMethodReferenceYear() { + assertThat(transform("year($1)")).isEqualTo(Document.parse("{ \"$year\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMonth() { - assertThat(transform("month($1)")).isEqualTo((Object) Document.parse("{ \"$month\" : \"$1\"}")); + void shouldRenderMethodReferenceMonth() { + assertThat(transform("month($1)")).isEqualTo(Document.parse("{ \"$month\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceWeek() { - assertThat(transform("week($1)")).isEqualTo((Object) Document.parse("{ \"$week\" : \"$1\"}")); + void shouldRenderMethodReferenceWeek() { + assertThat(transform("week($1)")).isEqualTo(Document.parse("{ \"$week\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceHour() { - assertThat(transform("hour($1)")).isEqualTo((Object) Document.parse("{ \"$hour\" : \"$1\"}")); + void shouldRenderMethodReferenceHour() { + assertThat(transform("hour($1)")).isEqualTo(Document.parse("{ \"$hour\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMinute() { - assertThat(transform("minute($1)")).isEqualTo((Object) Document.parse("{ \"$minute\" : \"$1\"}")); + void shouldRenderMethodReferenceMinute() { + assertThat(transform("minute($1)")).isEqualTo(Document.parse("{ \"$minute\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSecond() { - assertThat(transform("second($1)")).isEqualTo((Object) Document.parse("{ \"$second\" : \"$1\"}")); + void shouldRenderMethodReferenceSecond() { + assertThat(transform("second($1)")).isEqualTo(Document.parse("{ \"$second\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void 
shouldRenderMethodReferenceMillisecond() { - assertThat(transform("millisecond($1)")).isEqualTo((Object) Document.parse("{ \"$millisecond\" : \"$1\"}")); + void shouldRenderMethodReferenceMillisecond() { + assertThat(transform("millisecond($1)")).isEqualTo(Document.parse("{ \"$millisecond\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDateToString() { + void shouldRenderMethodReferenceDateToString() { assertThat(transform("dateToString('%Y-%m-%d', $date)")).isEqualTo( - (Object) Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); + Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCond() { - assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo((Object) Document + void shouldRenderMethodReferenceCond() { + assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( + Document .parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeIfNull() { - assertThat(transform("ifNull(a, 10)")).isEqualTo((Object) Document.parse("{ \"$ifNull\" : [ \"$a\" , 10]}")); + void shouldRenderMethodReferenceNodeIfNull() { + assertThat(transform("ifNull(a, 10)")).isEqualTo(Document.parse("{ \"$ifNull\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSum() { - assertThat(transform("sum(a, b)")).isEqualTo((Object) Document.parse("{ \"$sum\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeSum() { + assertThat(transform("sum(a, b)")).isEqualTo(Document.parse("{ \"$sum\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeAvg() { - assertThat(transform("avg(a, b)")).isEqualTo((Object) Document.parse("{ \"$avg\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeAvg() { + assertThat(transform("avg(a, b)")).isEqualTo(Document.parse("{ \"$avg\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceFirst() { - assertThat(transform("first($1)")).isEqualTo((Object) Document.parse("{ \"$first\" : \"$1\"}")); + void shouldRenderMethodReferenceFirst() { + assertThat(transform("first($1)")).isEqualTo(Document.parse("{ \"$first\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLast() { - assertThat(transform("last($1)")).isEqualTo((Object) Document.parse("{ \"$last\" : \"$1\"}")); + void shouldRenderMethodReferenceLast() { + assertThat(transform("last($1)")).isEqualTo(Document.parse("{ \"$last\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMax() { - assertThat(transform("max(a, b)")).isEqualTo((Object) Document.parse("{ \"$max\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMax() { + assertThat(transform("max(a, b)")).isEqualTo(Document.parse("{ \"$max\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMin() { - assertThat(transform("min(a, b)")).isEqualTo((Object) Document.parse("{ \"$min\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMin() { + assertThat(transform("min(a, b)")).isEqualTo(Document.parse("{ \"$min\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodePush() { + void shouldRenderMethodReferenceNodePush() { assertThat(transform("push({'item':'$item', 
'quantity':'$qty'})")) - .isEqualTo((Object) Document.parse("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}")); + .isEqualTo(Document.parse("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAddToSet() { - assertThat(transform("addToSet($1)")).isEqualTo((Object) Document.parse("{ \"$addToSet\" : \"$1\"}")); + void shouldRenderMethodReferenceAddToSet() { + assertThat(transform("addToSet($1)")).isEqualTo(Document.parse("{ \"$addToSet\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStdDevPop() { + void shouldRenderMethodReferenceNodeStdDevPop() { assertThat(transform("stdDevPop(scores.score)")) - .isEqualTo((Object) Document.parse("{ \"$stdDevPop\" : [ \"$scores.score\"]}")); + .isEqualTo(Document.parse("{ \"$stdDevPop\" : [ \"$scores.score\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStdDevSamp() { - assertThat(transform("stdDevSamp(age)")).isEqualTo((Object) Document.parse("{ \"$stdDevSamp\" : [ \"$age\"]}")); + void shouldRenderMethodReferenceNodeStdDevSamp() { + assertThat(transform("stdDevSamp(age)")).isEqualTo(Document.parse("{ \"$stdDevSamp\" : [ \"$age\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeEq() { - assertThat(transform("foo == 10")).isEqualTo((Object) Document.parse("{ \"$eq\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeEq() { + assertThat(transform("foo == 10")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeNe() { - assertThat(transform("foo != 10")).isEqualTo((Object) Document.parse("{ \"$ne\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeNe() { + assertThat(transform("foo != 10")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeGt() { - assertThat(transform("foo > 10")).isEqualTo((Object) Document.parse("{ \"$gt\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeGt() { + assertThat(transform("foo > 10")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeGte() { - assertThat(transform("foo >= 10")).isEqualTo((Object) Document.parse("{ \"$gte\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeGte() { + assertThat(transform("foo >= 10")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeLt() { - assertThat(transform("foo < 10")).isEqualTo((Object) Document.parse("{ \"$lt\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeLt() { + assertThat(transform("foo < 10")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeLte() { - assertThat(transform("foo <= 10")).isEqualTo((Object) Document.parse("{ \"$lte\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeLte() { + assertThat(transform("foo <= 10")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodePow() { - assertThat(transform("foo^2")).isEqualTo((Object) Document.parse("{ \"$pow\" : [ \"$foo\" , 2]}")); + void shouldRenderOperationNodePow() { + assertThat(transform("foo^2")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$foo\" , 2]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeOr() { - 
assertThat(transform("true || false")).isEqualTo((Object) Document.parse("{ \"$or\" : [ true , false]}")); + void shouldRenderOperationNodeOr() { + assertThat(transform("true || false")).isEqualTo(Document.parse("{ \"$or\" : [ true , false]}")); } @Test // DATAMONGO-1530 - public void shouldRenderComplexOperationNodeOr() { + void shouldRenderComplexOperationNodeOr() { assertThat(transform("1+2 || concat(a, b) || true")).isEqualTo( - (Object) Document.parse("{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + Document.parse("{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeAnd() { - assertThat(transform("true && false")).isEqualTo((Object) Document.parse("{ \"$and\" : [ true , false]}")); + void shouldRenderOperationNodeAnd() { + assertThat(transform("true && false")).isEqualTo(Document.parse("{ \"$and\" : [ true , false]}")); } @Test // DATAMONGO-1530 - public void shouldRenderComplexOperationNodeAnd() { - assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo((Object) Document + void shouldRenderComplexOperationNodeAnd() { + assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( + Document .parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); } @Test // DATAMONGO-1530 - public void shouldRenderNotCorrectly() { - assertThat(transform("!true")).isEqualTo((Object) Document.parse("{ \"$not\" : [ true]}")); + void shouldRenderNotCorrectly() { + assertThat(transform("!true")).isEqualTo(Document.parse("{ \"$not\" : [ true]}")); } @Test // DATAMONGO-1530 - public void shouldRenderComplexNotCorrectly() { + void shouldRenderComplexNotCorrectly() { assertThat(transform("!(foo > 10)")) - .isEqualTo((Object) Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); + .isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceIndexOfBytes() { + void shouldRenderMethodReferenceIndexOfBytes() { assertThat(transform("indexOfBytes(item, 'foo')")) .isEqualTo(Document.parse("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceIndexOfCP() { + void shouldRenderMethodReferenceIndexOfCP() { assertThat(transform("indexOfCP(item, 'foo')")) .isEqualTo(Document.parse("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceSplit() { + void shouldRenderMethodReferenceSplit() { assertThat(transform("split(item, ',')")).isEqualTo(Document.parse("{ \"$split\" : [ \"$item\" , \",\"]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceStrLenBytes() { + void shouldRenderMethodReferenceStrLenBytes() { assertThat(transform("strLenBytes(item)")).isEqualTo(Document.parse("{ \"$strLenBytes\" : \"$item\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceStrLenCP() { + void shouldRenderMethodReferenceStrLenCP() { assertThat(transform("strLenCP(item)")).isEqualTo(Document.parse("{ \"$strLenCP\" : \"$item\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodSubstrCP() { + void shouldRenderMethodSubstrCP() { assertThat(transform("substrCP(item, 0, 5)")).isEqualTo(Document.parse("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceReverseArray() { + void shouldRenderMethodReferenceReverseArray() { 
assertThat(transform("reverseArray(array)")).isEqualTo(Document.parse("{ \"$reverseArray\" : \"$array\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceReduce() { + void shouldRenderMethodReferenceReduce() { assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo(Document.parse( "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceZip() { + void shouldRenderMethodReferenceZip() { assertThat(transform("zip(new String[]{'$array1', '$array2'})")) .isEqualTo(Document.parse("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceZipWithOptionalArgs() { + void shouldRenderMethodReferenceZipWithOptionalArgs() { assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo(Document.parse( "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodIn() { + void shouldRenderMethodIn() { assertThat(transform("in('item', array)")).isEqualTo(Document.parse("{ \"$in\" : [ \"item\" , \"$array\"]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoDayOfWeek() { + void shouldRenderMethodRefereneIsoDayOfWeek() { assertThat(transform("isoDayOfWeek(date)")).isEqualTo(Document.parse("{ \"$isoDayOfWeek\" : \"$date\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoWeek() { + void shouldRenderMethodRefereneIsoWeek() { assertThat(transform("isoWeek(date)")).isEqualTo(Document.parse("{ \"$isoWeek\" : \"$date\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoWeekYear() { + void shouldRenderMethodRefereneIsoWeekYear() { assertThat(transform("isoWeekYear(date)")).isEqualTo(Document.parse("{ \"$isoWeekYear\" : \"$date\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneType() { + void shouldRenderMethodRefereneType() { assertThat(transform("type(a)")).isEqualTo(Document.parse("{ \"$type\" : \"$a\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderArrayToObjectWithFieldReference() { + void shouldRenderArrayToObjectWithFieldReference() { assertThat(transform("arrayToObject(field)")).isEqualTo(Document.parse("{ \"$arrayToObject\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderArrayToObjectWithArray() { + void shouldRenderArrayToObjectWithArray() { assertThat(transform("arrayToObject(new String[]{'key', 'value'})")) .isEqualTo(Document.parse("{ \"$arrayToObject\" : [\"key\", \"value\"]}")); } @Test // DATAMONGO-2077 - public void shouldRenderObjectToArrayWithFieldReference() { + void shouldRenderObjectToArrayWithFieldReference() { assertThat(transform("objectToArray(field)")).isEqualTo(Document.parse("{ \"$objectToArray\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderMergeObjects() { + void shouldRenderMergeObjects() { assertThat(transform("mergeObjects(field1, $$ROOT)")) .isEqualTo(Document.parse("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}")); } @Test // DATAMONGO-2077 - public void shouldRenderTrimWithoutChars() { + void shouldRenderTrimWithoutChars() { assertThat(transform("trim(field)")).isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderTrimWithChars() { + void 
shouldRenderTrimWithChars() { assertThat(transform("trim(field, 'ie')")) .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderTrimWithCharsFromFieldReference() { + void shouldRenderTrimWithCharsFromFieldReference() { assertThat(transform("trim(field1, field2)")) .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderLtrimWithoutChars() { + void shouldRenderLtrimWithoutChars() { assertThat(transform("ltrim(field)")).isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderLtrimWithChars() { + void shouldRenderLtrimWithChars() { assertThat(transform("ltrim(field, 'ie')")) .isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderLtrimWithCharsFromFieldReference() { + void shouldRenderLtrimWithCharsFromFieldReference() { assertThat(transform("ltrim(field1, field2)")) .isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderRtrimWithoutChars() { + void shouldRenderRtrimWithoutChars() { assertThat(transform("rtrim(field)")).isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderRtrimWithChars() { + void shouldRenderRtrimWithChars() { assertThat(transform("rtrim(field, 'ie')")) .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderRtrimWithCharsFromFieldReference() { + void shouldRenderRtrimWithCharsFromFieldReference() { assertThat(transform("rtrim(field1, field2)")) .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderConvertWithoutOptionalParameters() { + void shouldRenderConvertWithoutOptionalParameters() { assertThat(transform("convert(field, 'string')")) .isEqualTo(Document.parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderConvertWithOnError() { + void shouldRenderConvertWithOnError() { assertThat(transform("convert(field, 'int', 'Not an integer.')")).isEqualTo(Document .parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderConvertWithOnErrorOnNull() { + void shouldRenderConvertWithOnErrorOnNull() { assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo(Document.parse( "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\", \"onNull\" : -1 }}")); } @Test // DATAMONGO-2077 - public void shouldRenderToBool() { + void shouldRenderToBool() { assertThat(transform("toBool(field)")).isEqualTo(Document.parse("{ \"$toBool\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToDate() { + void shouldRenderToDate() { assertThat(transform("toDate(field)")).isEqualTo(Document.parse("{ \"$toDate\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToDecimal() { + void shouldRenderToDecimal() { assertThat(transform("toDecimal(field)")).isEqualTo(Document.parse("{ \"$toDecimal\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void 
shouldRenderToDouble() { + void shouldRenderToDouble() { assertThat(transform("toDouble(field)")).isEqualTo(Document.parse("{ \"$toDouble\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToInt() { + void shouldRenderToInt() { assertThat(transform("toInt(field)")).isEqualTo(Document.parse("{ \"$toInt\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToLong() { + void shouldRenderToLong() { assertThat(transform("toLong(field)")).isEqualTo(Document.parse("{ \"$toLong\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToObjectId() { + void shouldRenderToObjectId() { assertThat(transform("toObjectId(field)")).isEqualTo(Document.parse("{ \"$toObjectId\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToString() { + void shouldRenderToString() { assertThat(transform("toString(field)")).isEqualTo(Document.parse("{ \"$toString\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithoutOptionalParameters() { + void shouldRenderDateFromStringWithoutOptionalParameters() { assertThat(transform("dateFromString(field)")) .isEqualTo(Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithFormat() { + void shouldRenderDateFromStringWithFormat() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY')")).isEqualTo( Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithFormatAndTimezone() { + void shouldRenderDateFromStringWithFormatAndTimezone() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo(Document.parse( "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { + void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo(Document.parse( "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { + void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo(Document.parse( "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, \"onNull\" : -2}}")); } @Test // DATAMONGO-2077, DATAMONGO-2671 - public void shouldRenderDateFromParts() { + void shouldRenderDateFromParts() { assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}")); } @Test // DATAMONGO-2077, DATAMONGO-2671 - public void shouldRenderIsoDateFromParts() { + void shouldRenderIsoDateFromParts() { assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", 
\"timezone\" : \"UTC\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateToParts() { + void shouldRenderDateToParts() { assertThat(transform("dateToParts(field, 'UTC', false)")).isEqualTo( Document.parse("{ \"$dateToParts\" : {\"date\" : \"$field\", \"timezone\" : \"UTC\", \"iso8601\" : false}}")); } @Test // DATAMONGO-2077 - public void shouldRenderIndexOfArray() { + void shouldRenderIndexOfArray() { assertThat(transform("indexOfArray(field, 2)")) .isEqualTo(Document.parse("{ \"$indexOfArray\" : [\"$field\", 2 ]}")); } @Test // DATAMONGO-2077 - public void shouldRenderRange() { + void shouldRenderRange() { assertThat(transform("range(0, 10, 2)")).isEqualTo(Document.parse("{ \"$range\" : [0, 10, 2 ]}")); } @Test // DATAMONGO-2370 - public void shouldRenderRound() { + void shouldRenderRound() { assertThat(transform("round(field)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\"]}")); } @Test // DATAMONGO-2370 - public void shouldRenderRoundWithPlace() { + void shouldRenderRoundWithPlace() { assertThat(transform("round(field, 2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); } @@ -985,6 +988,7 @@ void rendersShiftWithDefault() { .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); } + @Nullable private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index dfa87dd3c7..daaad49963 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -2506,13 +2506,16 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` +| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` + +| Document Operators +| `rank`, `denseRank`, `documentNumber`, `shift` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators -| `eq` (*via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` +| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` | Array Aggregation Operators | `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` From 0c481feb722d3d301445d8135633363b24c1abeb Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 11:06:41 +0200 Subject: [PATCH 044/885] Extract Aggregation Framework and GridFS docs in own source files. 
Closes #3786 --- .../reference/aggregation-framework.adoc | 656 +++++++++++++++ src/main/asciidoc/reference/gridfs.adoc | 115 +++ src/main/asciidoc/reference/mongodb.adoc | 776 +----------------- 3 files changed, 774 insertions(+), 773 deletions(-) create mode 100644 src/main/asciidoc/reference/aggregation-framework.adoc create mode 100644 src/main/asciidoc/reference/gridfs.adoc diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc new file mode 100644 index 0000000000..a843af17f4 --- /dev/null +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -0,0 +1,656 @@ +[[mongo.aggregation]] +== Aggregation Framework Support + +Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. + +For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB. + +[[mongo.aggregation.basic-concepts]] +=== Basic Concepts + +The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationDefinition`, and `AggregationResults`. + +* `Aggregation` ++ +An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregationOperation` and an optional input class. ++ +The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter. ++ +* `TypedAggregation` ++ +A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type that is used for mapping domain properties to actual document fields. ++ +At runtime, field references get checked against the given input type, considering potential `@Field` annotations. +[NOTE] +==== +Changed in 3.2: referencing non-existent properties no longer raises an error. To restore the previous behaviour, use the `strictMapping` option of `AggregationOptions`. +==== +* `AggregationDefinition` ++ +An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregation` class to construct an `AggregationOperation`. ++ +* `AggregationResults` ++ +`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result (in the form of a `Document`), to the mapped objects, and to other information about the aggregation.
++ +The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework: ++ +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +Aggregation agg = newAggregation( + pipelineOP1(), + pipelineOP2(), + pipelineOPn() +); + +AggregationResults results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class); +List mappedResult = results.getMappedResults(); +---- + +Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence. + +[[mongo.aggregation.supported-aggregation-operations]] +=== Supported Aggregation Operations + +The MongoDB Aggregation Framework provides the following types of aggregation operations: + +* Pipeline Aggregation Operators +* Group Aggregation Operators +* Boolean Aggregation Operators +* Comparison Aggregation Operators +* Arithmetic Aggregation Operators +* String Aggregation Operators +* Date Aggregation Operators +* Array Aggregation Operators +* Conditional Aggregation Operators +* Lookup Aggregation Operators +* Convert Aggregation Operators +* Object Aggregation Operators +* Script Aggregation Operators + +At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB: + +.Aggregation Operations currently supported by Spring Data MongoDB +[cols="2*"] +|=== +| Pipeline Aggregation Operators +| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind` + +| Set Aggregation Operators +| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` + +| Group/Accumulator Aggregation Operators +| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` + +| Arithmetic Aggregation Operators +| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` + +| Document Operators +| `rank`, `denseRank`, `documentNumber`, `shift` + +| String Aggregation Operators +| `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` + +| Comparison Aggregation Operators +| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` + +| Array Aggregation Operators +| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` + +| Literal Operators +| `literal` + +| Date Aggregation Operators +| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` + +| Variable Operators +| `map` + +| Conditional Aggregation Operators +| `cond`, `ifNull`, `switch` + +| Type Aggregation Operators +| `type` + +| Convert Aggregation Operators +| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` + 
+| Object Aggregation Operators +| `objectToArray`, `mergeObjects` + +| Script Aggregation Operators +| `function`, `accumulator` +|=== + +* The operation is mapped or added by Spring Data MongoDB. + +Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB.Comparison aggregation operators are expressed as `Criteria` expressions. + +[[mongo.aggregation.projection]] +=== Projection Expressions + +Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method. +Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expression: + +.Projection expression examples +==== +[source,java] +---- +// generates {$project: {name: 1, netPrice: 1}} +project("name", "netPrice") + +// generates {$project: {thing1: $thing2}} +project().and("thing1").as("thing2") + +// generates {$project: {a: 1, b: 1, thing2: $thing1}} +project("a","b").and("thing1").as("thing2") +---- +==== + +.Multi-Stage Aggregation using Projection and Sorting +==== +[source,java] +---- +// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}} +project("name", "netPrice"), sort(ASC, "name") + +// generates {$project: {name: $firstname}}, {$sort: {name: 1}} +project().and("firstname").as("name"), sort(ASC, "name") + +// does not work +project().and("firstname").as("name"), sort(ASC, "firstname") +---- +==== + +More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation. + +[[mongo.aggregation.facet]] +=== Faceted Classification + +As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times. + +==== Buckets + +Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. 
You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output. + +`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations: + +.Bucket operation examples +==== +[source,java] +---- +// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}} +bucket("price").withBoundaries(0, 100, 400); + +// generates {$bucket: {groupBy: $price, default: "Other" boundaries: [0, 100]}} +bucket("price").withBoundaries(0, 100).withDefault("Other"); + +// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}} +bucket("price").withBoundaries(0, 100).andOutputCount().as("count"); + +// generates {$bucket: {groupBy: $price, boundaries: [0, 100], 5, output: { titles: { $push: "$title"}}} +bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles"); +---- +==== + +`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. The following listing shows examples of bucket operations: + +.Bucket operation examples +==== +[source,java] +---- +// generates {$bucketAuto: {groupBy: $price, buckets: 5}} +bucketAuto("price", 5) + +// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}} +bucketAuto("price", 5).withGranularity(Granularities.E24).withDefault("Other"); + +// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}} +bucketAuto("price", 5).andOutput("title").push().as("titles"); +---- +==== + +To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <> through `andOutputExpression()`. + +Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and +https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation. + +==== Multi-faceted Aggregation + +Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors. + +You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents. + +Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. 
The following listing shows facet operation examples: + +.Facet operation examples +==== +[source,java] +---- +// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}} +facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")) + +// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}} +facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")) + +// generates {$facet: {categorizedByYear: [ +// { $project: { title: 1, publicationYear: { $year: "publicationDate"}}}, +// { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}} +// ]}} +facet(project("title").and("publicationDate").extractYear().as("publicationYear"), + bucketAuto("publicationYear", 5).andOutput("title").push().as("titles")) + .as("categorizedByYear")) +---- +==== + +Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation. + +[[mongo.aggregation.sort-by-count]] +==== Sort By Count + +Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using <>. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example: + +.Sort by count example +==== +[source,java] +---- +// generates { $sortByCount: "$country" } +sortByCount("country"); +---- +==== + +A sort by count operation is equivalent to the following BSON (Binary JSON): + +---- +{ $group: { _id: , count: { $sum: 1 } } }, +{ $sort: { count: -1 } } +---- + +[[mongo.aggregation.projection.expressions]] +==== Spring Expression Support in Projection Expressions + +We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations. + +===== Complex Calculations with SpEL expressions + +Consider the following SpEL expression: + +[source,java] +---- +1 + (q + 1) / (q - 1) +---- + +The preceding expression is translated into the following projection expression part: + +[source,javascript] +---- +{ "$add" : [ 1, { + "$divide" : [ { + "$add":["$q", 1]}, { + "$subtract":[ "$q", 1]} + ] +}]} +---- + +You can see examples in more context in <> and <>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. 
The following table shows the SpEL transformations supported by Spring Data MongoDB: + +.Supported SpEL transformations +[%header,cols="2"] +|=== +| SpEL Expression +| Mongo Expression Part +| a == b +| { $eq : [$a, $b] } +| a != b +| { $ne : [$a , $b] } +| a > b +| { $gt : [$a, $b] } +| a >= b +| { $gte : [$a, $b] } +| a < b +| { $lt : [$a, $b] } +| a <= b +| { $lte : [$a, $b] } +| a + b +| { $add : [$a, $b] } +| a - b +| { $subtract : [$a, $b] } +| a * b +| { $multiply : [$a, $b] } +| a / b +| { $divide : [$a, $b] } +| a^b +| { $pow : [$a, $b] } +| a % b +| { $mod : [$a, $b] } +| a && b +| { $and : [$a, $b] } +| a \|\| b +| { $or : [$a, $b] } +| !a +| { $not : [$a] } +|=== + +In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion: + +[source,java] +---- +// { $setEquals : [$a, [5, 8, 13] ] } +.andExpression("setEquals(a, new int[]{5, 8, 13})"); +---- + +[[mongo.aggregation.examples]] +==== Aggregation Framework Examples + +The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB. + +[[mongo.aggregation.examples.example1]] +===== Aggregation Framework Example 1 + +In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting). + +[source,java] +---- +class TagCount { + String tag; + int n; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +Aggregation agg = newAggregation( + project("tags"), + unwind("tags"), + group("tags").count().as("n"), + project("n").and("tag").previousOperation(), + sort(DESC, "n") +); + +AggregationResults results = mongoTemplate.aggregate(agg, "tags", TagCount.class); +List tagCount = results.getMappedResults(); +---- + +The preceding listing uses the following algorithm: + +. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`. +. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection. +. Use the `unwind` operation to generate a new document for each tag within the `tags` array. +. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`). +. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`. +. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order. +. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument. + +Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` Method. 
If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggreation` method. + +[[mongo.aggregation.examples.example2]] +===== Aggregation Framework Example 2 + +This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection). + +[source,java] +---- +class ZipInfo { + String id; + String city; + String state; + @Field("pop") int population; + @Field("loc") double[] location; +} + +class City { + String name; + int population; +} + +class ZipInfoStats { + String id; + String state; + City biggestCity; + City smallestCity; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation aggregation = newAggregation(ZipInfo.class, + group("state", "city") + .sum("population").as("pop"), + sort(ASC, "pop", "state", "city"), + group("state") + .last("city").as("biggestCity") + .last("pop").as("biggestPop") + .first("city").as("smallestCity") + .first("pop").as("smallestPop"), + project() + .and("state").previousOperation() + .and("biggestCity") + .nested(bind("name", "biggestCity").and("population", "biggestPop")) + .and("smallestCity") + .nested(bind("name", "smallestCity").and("population", "smallestPop")), + sort(ASC, "state") +); + +AggregationResults result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); +ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); +---- + +Note that the `ZipInfo` class maps the structure of the given input-collection. The `ZipInfoStats` class defines the structure in the desired output format. + +The preceding listings use the following algorithm: + +. Use the `group` operation to define a group from the input-collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field. +. Use the `sort` operation to sort the intermediate-result by the `pop`, `state` and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handled). +. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(...)` operators, respectively, in the `project` operation. +. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. 
Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method. +. Sort the resulting list of `StateStats` by their state name in ascending order in the `sort` operation. + +Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method. + +[[mongo.aggregation.examples.example3]] +===== Aggregation Framework Example 3 + +This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering). + +[source,java] +---- +class StateStats { + @Id String id; + String state; + @Field("totalPop") int totalPopulation; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(ZipInfo.class, + group("state").sum("population").as("totalPop"), + sort(ASC, previousOperation(), "totalPop"), + match(where("totalPop").gte(10 * 1000 * 1000)) +); + +AggregationResults result = mongoTemplate.aggregate(agg, StateStats.class); +List stateStatsList = result.getMappedResults(); +---- + +The preceding listings use the following algorithm: + +. Group the input collection by the `state` field and calculate the sum of the `population` field and store the result in the new field `"totalPop"`. +. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order. +. Filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument. + +Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method. + +[[mongo.aggregation.examples.example4]] +===== Aggregation Framework Example 4 + +This example demonstrates the use of simple arithmetic operations in the projection operation. + +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .and("netPrice").plus(1).as("netPricePlus1") + .and("netPrice").minus(1).as("netPriceMinus1") + .and("netPrice").multiply(1.19).as("grossPrice") + .and("netPrice").divide(2).as("netPriceDiv2") + .and("spaceUnits").mod(2).as("spaceUnitsMod2") +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method. + +[[mongo.aggregation.examples.example5]] +===== Aggregation Framework Example 5 + +This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation. 
+ +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .andExpression("netPrice + 1").as("netPricePlus1") + .andExpression("netPrice - 1").as("netPriceMinus1") + .andExpression("netPrice / 2").as("netPriceDiv2") + .andExpression("netPrice * 1.19").as("grossPrice") + .andExpression("spaceUnits % 2").as("spaceUnitsMod2") + .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge") + +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +[[mongo.aggregation.examples.example6]] +===== Aggregation Framework Example 6 + +This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation. + +Note: The additional parameters passed to the `andExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values. + +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +double shippingCosts = 1.2; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice") +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +Note that we can also refer to other fields of the document within the SpEL expression. + +[[mongo.aggregation.examples.example7]] +===== Aggregation Framework Example 7 + +This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation]. + +[source,java] +---- +public class InventoryItem { + + @Id int id; + String item; + String description; + int qty; +} + +public class InventoryItemProjection { + + @Id int id; + String item; + String description; + int qty; + int discount; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(InventoryItem.class, + project("item").and("discount") + .applyCondition(ConditionalOperator.newBuilder().when(Criteria.where("qty").gte(250)) + .then(30) + .otherwise(20)) + .and(ifNull("description", "Unspecified")).as("description") +); + +AggregationResults result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class); +List stateStatsList = result.getMappedResults(); +---- + +This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or items that have a `null` description.
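+
+For orientation (an editorial sketch, not part of the upstream reference text), the projection built above corresponds roughly to the following pipeline stage; the exact rendered document may differ in detail:
+
+----
+{ "$project" : {
+    "item" : 1,
+    "discount" : { "$cond" : { "if" : { "$gte" : [ "$qty" , 250 ] } , "then" : 30 , "else" : 20 } },
+    "description" : { "$ifNull" : [ "$description" , "Unspecified" ] }
+} }
+----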
+ +As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression. + +.Conditional aggregation projection +==== +[source,java] +---- +TypedAggregation agg = Aggregation.newAggregation(Book.class, + project("title") + .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1> + .equalToValue("")) <2> + .then("$$REMOVE") <3> + .otherwiseValueOf("author.middle") <4> + ) + .as("author.middle")); +---- +<1> If the value of the field `author.middle` +<2> does not contain a value, +<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field. +<4> Otherwise, add the field value of `author.middle`. +==== diff --git a/src/main/asciidoc/reference/gridfs.adoc b/src/main/asciidoc/reference/gridfs.adoc new file mode 100644 index 0000000000..94caf11f47 --- /dev/null +++ b/src/main/asciidoc/reference/gridfs.adoc @@ -0,0 +1,115 @@ +[[gridfs]] +== GridFS Support + +MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows: + +.JavaConfig setup for a GridFsTemplate +==== +[source,java] +---- +class GridFsConfiguration extends AbstractMongoClientConfiguration { + + // … further configuration omitted + + @Bean + public GridFsTemplate gridFsTemplate() { + return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter()); + } +} +---- +==== + +The corresponding XML configuration follows: + +.XML configuration for a GridFsTemplate +==== +[source,xml] +---- + + + + + + + + + + + + +---- +==== + +The template can now be injected and used to perform storage and retrieval operations, as the following example shows: + +.Using GridFsTemplate to store files +==== +[source,java] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void storeFileToGridFs() { + + FileMetadata metadata = new FileMetadata(); + // populate metadata + Resource file = … // lookup File or Resource + + operations.store(file.getInputStream(), "filename.txt", metadata); + } +} +---- +==== + +The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`. + +You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. 
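+
+As a further illustration (an editorial sketch, not part of the upstream reference text), a query against a custom metadata entry could look roughly like the following. The `tenant` key is a hypothetical entry assumed to be present in the metadata document stored alongside the file:
+
+[source,java]
+----
+// Hypothetical sketch: find all files whose stored metadata document
+// carries a "tenant" entry with the value "acme".
+GridFSFindIterable byTenant = operations.find(
+    query(GridFsCriteria.whereMetaData("tenant").is("acme")));
+----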
The following example shows how to use `GridFsTemplate` to query for files: + +.Using GridFsTemplate to query for files +==== +[source,java] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void findFilesInGridFs() { + GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt"))); + } +} +---- +==== + +NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded. + +The other option to read files from GridFS is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files: + +.Using GridFsTemplate to read files +==== +[source,java] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void readFilesFromGridFs() { + GridFsResource[] txtFiles = operations.getResources("*.txt"); + } +} +---- +==== + +`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring Config files from the MongoDB database. diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index daaad49963..f214edba4c 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1002,7 +1002,7 @@ assertThat(upserted.getFirstName()).isEqualTo("Mary"); assertThat(upserted.getAge()).isOne(); ---- -= [[mongo-template.aggregation-update]] +[[mongo-template.aggregation-update]] === Aggregation Pipeline Updates Update methods exposed by `MongoOperations` and `ReactiveMongoOperations` also accept an <> via `AggregationUpdate`. @@ -2419,662 +2419,7 @@ GroupByResults results = mongoTemplate.group(where("x").gt(0), keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class); ---- -[[mongo.aggregation]] -== Aggregation Framework Support - -Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. - -For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB. - -[[mongo.aggregation.basic-concepts]] -=== Basic Concepts - -The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationDefinition`, and `AggregationResults`. - -* `Aggregation` -+ -An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregateOperation` and an optional input class. -+ -The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter. -+ -* `TypedAggregation` -+ -A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type, that is used for mapping domain properties to actual document fields.
-+ -At runtime, field references get checked against the given input type, considering potential `@Field` annotations. -[NOTE] -==== -Changed in 3.2 referencing non-existent properties does no longer raise errors. To restore the previous behaviour use the `strictMapping` option of `AggregationOptions`. -==== -* `AggregationDefinition` -+ -An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregate` class to construct an `AggregateOperation`. -+ -* `AggregationResults` -+ -`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result, in the form of a `Document` to the mapped objects and other information about the aggregation. -+ -The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework: -+ -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -Aggregation agg = newAggregation( - pipelineOP1(), - pipelineOP2(), - pipelineOPn() -); - -AggregationResults results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class); -List mappedResult = results.getMappedResults(); ----- - -Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence. 
- -[[mongo.aggregation.supported-aggregation-operations]] -=== Supported Aggregation Operations - -The MongoDB Aggregation Framework provides the following types of aggregation operations: - -* Pipeline Aggregation Operators -* Group Aggregation Operators -* Boolean Aggregation Operators -* Comparison Aggregation Operators -* Arithmetic Aggregation Operators -* String Aggregation Operators -* Date Aggregation Operators -* Array Aggregation Operators -* Conditional Aggregation Operators -* Lookup Aggregation Operators -* Convert Aggregation Operators -* Object Aggregation Operators -* Script Aggregation Operators - -At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB: - -.Aggregation Operations currently supported by Spring Data MongoDB -[cols="2*"] -|=== -| Pipeline Aggregation Operators -| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind` - -| Set Aggregation Operators -| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` - -| Group/Accumulator Aggregation Operators -| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` - -| Arithmetic Aggregation Operators -| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` - -| Document Operators -| `rank`, `denseRank`, `documentNumber`, `shift` - -| String Aggregation Operators -| `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` - -| Comparison Aggregation Operators -| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` - -| Array Aggregation Operators -| `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` - -| Literal Operators -| `literal` - -| Date Aggregation Operators -| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` - -| Variable Operators -| `map` - -| Conditional Aggregation Operators -| `cond`, `ifNull`, `switch` - -| Type Aggregation Operators -| `type` - -| Convert Aggregation Operators -| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` - -| Object Aggregation Operators -| `objectToArray`, `mergeObjects` - -| Script Aggregation Operators -| `function`, `accumulator` -|=== - -* The operation is mapped or added by Spring Data MongoDB. - -Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB.Comparison aggregation operators are expressed as `Criteria` expressions. - -[[mongo.aggregation.projection]] -=== Projection Expressions - -Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. 
The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method. -Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expression: - -.Projection expression examples -==== -[source,java] ----- -// generates {$project: {name: 1, netPrice: 1}} -project("name", "netPrice") - -// generates {$project: {thing1: $thing2}} -project().and("thing1").as("thing2") - -// generates {$project: {a: 1, b: 1, thing2: $thing1}} -project("a","b").and("thing1").as("thing2") ----- -==== - -.Multi-Stage Aggregation using Projection and Sorting -==== -[source,java] ----- -// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}} -project("name", "netPrice"), sort(ASC, "name") - -// generates {$project: {name: $firstname}}, {$sort: {name: 1}} -project().and("firstname").as("name"), sort(ASC, "name") - -// does not work -project().and("firstname").as("name"), sort(ASC, "firstname") ----- -==== - -More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation. - -[[mongo.aggregation.facet]] -=== Faceted Classification - -As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times. - -==== Buckets - -Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output. - -`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. 
The following listing shows some examples of bucket operations: - -.Bucket operation examples -==== -[source,java] ----- -// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}} -bucket("price").withBoundaries(0, 100, 400); - -// generates {$bucket: {groupBy: $price, default: "Other" boundaries: [0, 100]}} -bucket("price").withBoundaries(0, 100).withDefault("Other"); - -// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}} -bucket("price").withBoundaries(0, 100).andOutputCount().as("count"); - -// generates {$bucket: {groupBy: $price, boundaries: [0, 100], 5, output: { titles: { $push: "$title"}}} -bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles"); ----- -==== - -`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. The following listing shows examples of bucket operations: - -.Bucket operation examples -==== -[source,java] ----- -// generates {$bucketAuto: {groupBy: $price, buckets: 5}} -bucketAuto("price", 5) - -// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}} -bucketAuto("price", 5).withGranularity(Granularities.E24).withDefault("Other"); - -// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}} -bucketAuto("price", 5).andOutput("title").push().as("titles"); ----- -==== - -To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <> through `andOutputExpression()`. - -Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and -https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation. - -==== Multi-faceted Aggregation - -Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors. - -You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents. - -Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. 
The following listing shows facet operation examples: - -.Facet operation examples -==== -[source,java] ----- -// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}} -facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")) - -// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}} -facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")) - -// generates {$facet: {categorizedByYear: [ -// { $project: { title: 1, publicationYear: { $year: "publicationDate"}}}, -// { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}} -// ]}} -facet(project("title").and("publicationDate").extractYear().as("publicationYear"), - bucketAuto("publicationYear", 5).andOutput("title").push().as("titles")) - .as("categorizedByYear")) ----- -==== - -Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation. - -[[mongo.aggregation.sort-by-count]] -==== Sort By Count - -Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using <>. Sort by count operations require a grouping field or grouping expression. The following listing shows a sort by count example: - -.Sort by count example -==== -[source,java] ----- -// generates { $sortByCount: "$country" } -sortByCount("country"); ----- -==== - -A sort by count operation is equivalent to the following BSON (Binary JSON): - ----- -{ $group: { _id: , count: { $sum: 1 } } }, -{ $sort: { count: -1 } } ----- - -[[mongo.aggregation.projection.expressions]] -==== Spring Expression Support in Projection Expressions - -We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations. - -===== Complex Calculations with SpEL expressions - -Consider the following SpEL expression: - -[source,java] ----- -1 + (q + 1) / (q - 1) ----- - -The preceding expression is translated into the following projection expression part: - -[source,javascript] ----- -{ "$add" : [ 1, { - "$divide" : [ { - "$add":["$q", 1]}, { - "$subtract":[ "$q", 1]} - ] -}]} ----- - -You can see examples in more context in <> and <>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. 
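As a brief sketch (assuming a numeric property named `q` on the input type, matching the expression above), such an expression is attached to a projection through the `andExpression` method:

[source,java]
----
// translated into the $add/$divide/$subtract document shown above
project("name")
    .andExpression("1 + (q + 1) / (q - 1)").as("result");
----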
The following table shows the SpEL transformations supported by Spring Data MongoDB: - -.Supported SpEL transformations -[%header,cols="2"] -|=== -| SpEL Expression -| Mongo Expression Part -| a == b -| { $eq : [$a, $b] } -| a != b -| { $ne : [$a , $b] } -| a > b -| { $gt : [$a, $b] } -| a >= b -| { $gte : [$a, $b] } -| a < b -| { $lt : [$a, $b] } -| a <= b -| { $lte : [$a, $b] } -| a + b -| { $add : [$a, $b] } -| a - b -| { $subtract : [$a, $b] } -| a * b -| { $multiply : [$a, $b] } -| a / b -| { $divide : [$a, $b] } -| a^b -| { $pow : [$a, $b] } -| a % b -| { $mod : [$a, $b] } -| a && b -| { $and : [$a, $b] } -| a \|\| b -| { $or : [$a, $b] } -| !a -| { $not : [$a] } -|=== - -In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion: - -[source,java] ----- -// { $setEquals : [$a, [5, 8, 13] ] } -.andExpression("setEquals(a, new int[]{5, 8, 13})"); ----- - -[[mongo.aggregation.examples]] -==== Aggregation Framework Examples - -The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB. - -[[mongo.aggregation.examples.example1]] -===== Aggregation Framework Example 1 - -In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting). - -[source,java] ----- -class TagCount { - String tag; - int n; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -Aggregation agg = newAggregation( - project("tags"), - unwind("tags"), - group("tags").count().as("n"), - project("n").and("tag").previousOperation(), - sort(DESC, "n") -); - -AggregationResults results = mongoTemplate.aggregate(agg, "tags", TagCount.class); -List tagCount = results.getMappedResults(); ----- - -The preceding listing uses the following algorithm: - -. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`. -. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection. -. Use the `unwind` operation to generate a new document for each tag within the `tags` array. -. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`). -. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`. -. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order. -. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument. - -Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` Method. 
If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example2]] -===== Aggregation Framework Example 2 - -This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection). - -[source,java] ---- -class ZipInfo { - String id; - String city; - String state; - @Field("pop") int population; - @Field("loc") double[] location; -} - -class City { - String name; - int population; -} - -class ZipInfoStats { - String id; - String state; - City biggestCity; - City smallestCity; -} ---- - -[source,java] ---- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation<ZipInfo> aggregation = newAggregation(ZipInfo.class, - group("state", "city") - .sum("population").as("pop"), - sort(ASC, "pop", "state", "city"), - group("state") - .last("city").as("biggestCity") - .last("pop").as("biggestPop") - .first("city").as("smallestCity") - .first("pop").as("smallestPop"), - project() - .and("state").previousOperation() - .and("biggestCity") - .nested(bind("name", "biggestCity").and("population", "biggestPop")) - .and("smallestCity") - .nested(bind("name", "smallestCity").and("population", "smallestPop")), - sort(ASC, "state") -); - -AggregationResults<ZipInfoStats> result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); -ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); ---- - -Note that the `ZipInfo` class maps the structure of the given input-collection. The `ZipInfoStats` class defines the structure in the desired output format. - -The preceding listings use the following algorithm: - -. Use the `group` operation to define a group from the input-collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field. -. Use the `sort` operation to sort the intermediate-result by the `pop`, `state` and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handled). -. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(...)` operators, respectively, in the `project` operation. -. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`.
Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method. -. Sort the resulting list of `StateStats` by their state name in ascending order in the `sort` operation. - -Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example3]] -===== Aggregation Framework Example 3 - -This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering). - -[source,java] ---- -class StateStats { - @Id String id; - String state; - @Field("totalPop") int totalPopulation; -} ---- - -[source,java] ---- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation<ZipInfo> agg = newAggregation(ZipInfo.class, - group("state").sum("population").as("totalPop"), - sort(ASC, previousOperation(), "totalPop"), - match(where("totalPop").gte(10 * 1000 * 1000)) -); - -AggregationResults<StateStats> result = mongoTemplate.aggregate(agg, StateStats.class); -List<StateStats> stateStatsList = result.getMappedResults(); ---- - -The preceding listings use the following algorithm: - -. Group the input collection by the `state` field and calculate the sum of the `population` field and store the result in the new field `"totalPop"`. -. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order. -. Filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument. - -Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example4]] -===== Aggregation Framework Example 4 - -This example demonstrates the use of simple arithmetic operations in the projection operation. - -[source,java] ---- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ---- - -[source,java] ---- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation<Product> agg = newAggregation(Product.class, - project("name", "netPrice") - .and("netPrice").plus(1).as("netPricePlus1") - .and("netPrice").minus(1).as("netPriceMinus1") - .and("netPrice").multiply(1.19).as("grossPrice") - .and("netPrice").divide(2).as("netPriceDiv2") - .and("spaceUnits").mod(2).as("spaceUnitsMod2") -); - -AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class); -List<Document> resultList = result.getMappedResults(); ---- - -Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example5]] -===== Aggregation Framework Example 5 - -This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation.
- -[source,java] ---- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ---- - -[source,java] ---- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation<Product> agg = newAggregation(Product.class, - project("name", "netPrice") - .andExpression("netPrice + 1").as("netPricePlus1") - .andExpression("netPrice - 1").as("netPriceMinus1") - .andExpression("netPrice / 2").as("netPriceDiv2") - .andExpression("netPrice * 1.19").as("grossPrice") - .andExpression("spaceUnits % 2").as("spaceUnitsMod2") - .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge") - -); - -AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class); -List<Document> resultList = result.getMappedResults(); ---- - -[[mongo.aggregation.examples.example6]] -===== Aggregation Framework Example 6 - -This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation. - -Note: The additional parameters passed to the `andExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values. - -[source,java] ---- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ---- - -[source,java] ---- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -double shippingCosts = 1.2; - -TypedAggregation<Product> agg = newAggregation(Product.class, - project("name", "netPrice") - .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice") -); - -AggregationResults<Document> result = mongoTemplate.aggregate(agg, Document.class); -List<Document> resultList = result.getMappedResults(); ---- - -Note that we can also refer to other fields of the document within the SpEL expression. - -[[mongo.aggregation.examples.example7]] -===== Aggregation Framework Example 7 - -This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation]. - -[source,java] ---- -public class InventoryItem { - - @Id int id; - String item; - String description; - int qty; -} - -public class InventoryItemProjection { - - @Id int id; - String item; - String description; - int qty; - int discount; -} ---- - -[source,java] ---- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation<InventoryItem> agg = newAggregation(InventoryItem.class, - project("item").and("discount") - .applyCondition(ConditionalOperator.newBuilder().when(Criteria.where("qty").gte(250)) - .then(30) - .otherwise(20)) - .and(ifNull("description", "Unspecified")).as("description") -); - -AggregationResults<InventoryItemProjection> result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class); -List<InventoryItemProjection> stateStatsList = result.getMappedResults(); ---- - -This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or items that have a `null` description.
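For orientation, a rough sketch (not verbatim output) of the aggregation stage the preceding projection produces looks along these lines:

[source,javascript]
----
{ "$project" : {
    "item" : 1,
    "discount" : { "$cond" : { "if" : { "$gte" : [ "$qty", 250 ] }, "then" : 30, "else" : 20 } },
    "description" : { "$ifNull" : [ "$description", "Unspecified" ] }
}}
----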
- -As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression. - -.Conditional aggregation projection -==== -[source,java] ----- -TypedAggregation agg = Aggregation.newAggregation(Book.class, - project("title") - .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1> - .equalToValue("")) <2> - .then("$$REMOVE") <3> - .otherwiseValueOf("author.middle") <4> - ) - .as("author.middle")); ----- -<1> If the value of the field `author.middle` -<2> does not contain a value, -<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field. -<4> Otherwise, add the field value of `author.middle`. -==== +include::aggregation-framework.adoc[] [[mongo-template.index-and-collections]] == Index and Collection Management @@ -3267,122 +2612,7 @@ boolean hasIndex = template.execute("geolocation", new CollectionCallbackBoolean }); ---- -[[gridfs]] -== GridFS Support - -MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows: - -.JavaConfig setup for a GridFsTemplate -==== -[source,java] ----- -class GridFsConfiguration extends AbstractMongoClientConfiguration { - - // … further configuration omitted - - @Bean - public GridFsTemplate gridFsTemplate() { - return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter()); - } -} ----- -==== - -The corresponding XML configuration follows: - -.XML configuration for a GridFsTemplate -==== -[source,xml] ----- - - - - - - - - - - - - ----- -==== - -The template can now be injected and used to perform storage and retrieval operations, as the following example shows: - -.Using GridFsTemplate to store files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void storeFileToGridFs() { - - FileMetadata metadata = new FileMetadata(); - // populate metadata - Resource file = … // lookup File or Resource - - operations.store(file.getInputStream(), "filename.txt", metadata); - } -} ----- -==== - -The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`. - -You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. 
The following example shows how to use `GridFsTemplate` to query for files: - -.Using GridFsTemplate to query for files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void findFilesInGridFs() { - GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt"))) - } -} ----- -==== - -NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded. - -The other option to read files from the GridFs is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files: - -.Using GridFsTemplate to read files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void readFilesFromGridFs() { - GridFsResources[] txtFiles = operations.getResources("*.txt"); - } -} ----- -==== - -`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) to be plugged into an `ApplicationContext` to read Spring Config files from MongoDB database. - +include::gridfs.adoc[] include::tailable-cursors.adoc[] include::change-streams.adoc[] include::time-series.adoc[] From 75b5a548b61c3ea1fa70bb60cdc69ff6ee2c91c6 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 11:23:42 +0200 Subject: [PATCH 045/885] Polishing. Fix asterisk callouts. See #3786 --- .../asciidoc/reference/aggregation-framework.adoc | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index a843af17f4..547b3b1530 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -57,7 +57,7 @@ Note that, if you provide an input class as the first parameter to the `newAggre The MongoDB Aggregation Framework provides the following types of aggregation operations: * Pipeline Aggregation Operators -* Group Aggregation Operators +* Group/Accumulator Aggregation Operators * Boolean Aggregation Operators * Comparison Aggregation Operators * Arithmetic Aggregation Operators @@ -82,19 +82,16 @@ At the time of this writing, we provide support for the following Aggregation Op | `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` | Group/Accumulator Aggregation Operators -| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` +| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` - -| Document Operators -| `rank`, `denseRank`, `documentNumber`, `shift` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` | String Aggregation Operators | `concat`, `substr`, 
`toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators -| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` +| `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne` | Array Aggregation Operators | `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` @@ -124,9 +121,9 @@ At the time of this writing, we provide support for the following Aggregation Op | `function`, `accumulator` |=== -* The operation is mapped or added by Spring Data MongoDB. ++++*+++ The operation is mapped or added by Spring Data MongoDB. -Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB.Comparison aggregation operators are expressed as `Criteria` expressions. +Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. [[mongo.aggregation.projection]] === Projection Expressions From 82b33331fcc998cbd29421e9a6f1d59f3a7fd0a2 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 14:08:06 +0200 Subject: [PATCH 046/885] Add support for `$derivative` aggregation operator. Closes: #3716 Original pull request: #3742. --- .../core/aggregation/ArithmeticOperators.java | 55 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../ArithmeticOperatorsUnitTests.java | 7 +++ .../SpelExpressionTransformerUnitTests.java | 5 ++ 4 files changed, 68 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index b27e54d298..fe54f2434e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -26,7 +26,9 @@ import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * Gateway to {@literal Arithmetic} aggregation operations that perform math operations on numbers. @@ -591,6 +593,31 @@ public Round roundToPlace(int place) { return round().place(place); } + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative() { + return derivative(null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? 
derivative.unit(unit) : derivative; + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -1724,4 +1751,32 @@ protected String getMongoMethod() { return "$round"; } } + + public static class Derivative extends AbstractAggregationExpression { + + private Derivative(Object value) { + super(value); + } + + public static Derivative derivativeOf(String fieldReference) { + return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + public static Derivative derivativeOf(AggregationExpression expression) { + return new Derivative(Collections.singletonMap("input", expression)); + } + + public static Derivative derivativeOfValue(Number value) { + return new Derivative(Collections.singletonMap("input", value)); + } + + public Derivative unit(String unit) { + return new Derivative(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$derivative"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 4052b2cbaa..928869e93b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -91,6 +91,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("subtract", arrayArgRef().forOperator("$subtract")); map.put("trunc", singleArgRef().forOperator("$trunc")); map.put("round", arrayArgRef().forOperator("$round")); + map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 940a315239..b4f3cdadb6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -59,4 +59,11 @@ void roundShouldWithPlaceFromExpression() { .toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(new Document("$round", Arrays.asList("$field", new Document("$first", "$source")))); } + + @Test // GH-3716 + void rendersDerivativeCorrectly() { + + assertThat(valueOf("miles").derivative("hour").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index ee55818018..2653c52f2d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -989,6 +989,11 @@ void rendersShiftWithDefault() { } @Nullable + @Test // GH-3716 + void shouldRenderDerivative() { + assertThat(transform("derivative(miles, 
'hour')")).isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 10c0203605b421d47764866303113b02dfdc4e3e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 14:29:46 +0200 Subject: [PATCH 047/885] Polishing. Accept window units in addition to plain strings. Document operator. See: #3716 Original pull request: #3742. --- .../core/aggregation/ArithmeticOperators.java | 21 ++++++++++++++++++- .../ArithmeticOperatorsUnitTests.java | 6 ++++-- .../reference/aggregation-framework.adoc | 3 ++- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index fe54f2434e..39579fc7b6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -17,6 +17,7 @@ import java.util.Collections; import java.util.List; +import java.util.Locale; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; @@ -26,6 +27,8 @@ import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnit; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -34,6 +37,7 @@ * Gateway to {@literal Arithmetic} aggregation operations that perform math operations on numbers. * * @author Christoph Strobl + * @author Mark Paluch * @since 1.10 */ public class ArithmeticOperators { @@ -600,7 +604,22 @@ public Round roundToPlace(int place) { * @since 3.3 */ public Derivative derivative() { - return derivative(null); + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. 
+ * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index b4f3cdadb6..da03bc5c61 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -28,8 +28,9 @@ * Unit tests for {@link Round}. * * @author Christoph Strobl + * @author Mark Paluch */ -public class ArithmeticOperatorsUnitTests { +class ArithmeticOperatorsUnitTests { @Test // DATAMONGO-2370 void roundShouldWithoutPlace() { @@ -63,7 +64,8 @@ void roundShouldWithPlaceFromExpression() { @Test // GH-3716 void rendersDerivativeCorrectly() { - assertThat(valueOf("miles").derivative("hour").toDocument(Aggregation.DEFAULT_CONTEXT)) + assertThat( + valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); } } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 547b3b1530..2624e6c27e 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` @@ -119,6 +119,7 @@ At the time of this writing, we provide support for the following Aggregation Op | Script Aggregation Operators | `function`, `accumulator` + |=== +++*+++ The operation is mapped or added by Spring Data MongoDB. From 6bd0f758fed10eb92c55bd22da7f5b8b40b387a1 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 10:10:18 +0200 Subject: [PATCH 048/885] Extend support for `$ifNull` to cover multiple conditions. Closes: #3720 Original pull request: #3745. 
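As an illustration (not part of the change itself), the extended builder reads along these lines:

    // renders {"$ifNull" : ["$description", "$quantity", "Unspecified"]}
    ConditionalOperators.ifNull("description").orIfNull("quantity").then("Unspecified");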
--- .../aggregation/ConditionalOperators.java | 69 +++++++++++++++---- .../ConditionalOperatorsUnitTests.java | 35 ++++++++++ 2 files changed, 90 insertions(+), 14 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java index 1d3890ce89..95a0290ec2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java @@ -17,6 +17,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; @@ -282,19 +283,29 @@ public Document toDocument(AggregationOperationContext context) { List list = new ArrayList(); - if (condition instanceof Field) { - list.add(context.getReference((Field) condition).toString()); - } else if (condition instanceof AggregationExpression) { - list.add(((AggregationExpression) condition).toDocument(context)); + if(condition instanceof Collection) { + for(Object val : ((Collection)this.condition)) { + list.add(mapCondition(val, context)); + } } else { - list.add(condition); + list.add(mapCondition(condition, context)); } list.add(resolve(value, context)); - return new Document("$ifNull", list); } + private Object mapCondition(Object condition, AggregationOperationContext context) { + + if (condition instanceof Field) { + return context.getReference((Field) condition).toString(); + } else if (condition instanceof AggregationExpression) { + return ((AggregationExpression) condition).toDocument(context); + } else { + return condition; + } + } + private Object resolve(Object value, AggregationOperationContext context) { if (value instanceof Field) { @@ -323,15 +334,34 @@ public interface IfNullBuilder { /** * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} * or empty. - * @return the {@link ThenBuilder} + * @return the {@link ThenBuilder}. */ ThenBuilder ifNull(AggregationExpression expression); } + /** + * @author Christoph Strobl + * @since 3.3 + */ + public interface OrBuilder { + + /** + * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder orIfNull(String fieldReference); + + /** + * @param expression the expression to check for a {@literal null} value, + * @return the {@link ThenBuilder}. + */ + ThenBuilder orIfNull(AggregationExpression expression); + } + /** * @author Mark Paluch */ - public interface ThenBuilder { + public interface ThenBuilder extends OrBuilder { /** * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. 
Can be a @@ -361,9 +391,10 @@ public interface ThenBuilder { */ static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder { - private @Nullable Object condition; + private @Nullable List conditions; private IfNullOperatorBuilder() { + conditions = new ArrayList<>(); } /** @@ -381,7 +412,7 @@ public static IfNullOperatorBuilder newBuilder() { public ThenBuilder ifNull(String fieldReference) { Assert.hasText(fieldReference, "FieldReference name must not be null or empty!"); - this.condition = Fields.field(fieldReference); + this.conditions.add(Fields.field(fieldReference)); return this; } @@ -392,15 +423,25 @@ public ThenBuilder ifNull(String fieldReference) { public ThenBuilder ifNull(AggregationExpression expression) { Assert.notNull(expression, "AggregationExpression name must not be null or empty!"); - this.condition = expression; + this.conditions.add(expression); return this; } + @Override + public ThenBuilder orIfNull(String fieldReference) { + return ifNull(fieldReference); + } + + @Override + public ThenBuilder orIfNull(AggregationExpression expression) { + return ifNull(expression); + } + /* (non-Javadoc) * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object) */ public IfNull then(Object value) { - return new IfNull(condition, value); + return new IfNull(conditions, value); } /* (non-Javadoc) @@ -409,7 +450,7 @@ public IfNull then(Object value) { public IfNull thenValueOf(String fieldReference) { Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IfNull(condition, Fields.field(fieldReference)); + return new IfNull(conditions, Fields.field(fieldReference)); } /* (non-Javadoc) @@ -418,7 +459,7 @@ public IfNull thenValueOf(String fieldReference) { public IfNull thenValueOf(AggregationExpression expression) { Assert.notNull(expression, "Expression must not be null!"); - return new IfNull(condition, expression); + return new IfNull(conditions, expression); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java new file mode 100644 index 0000000000..132600cbb5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ConditionalOperators.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * @author Christoph Strobl + */ +public class ConditionalOperatorsUnitTests { + + @Test // GH-3720 + void rendersIfNullWithMultipleConditionalValuesCorrectly() { + + assertThat(ifNull("description").orIfNull("quantity").then("Unspecified").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $ifNull: [ \"$description\", \"$quantity\", \"Unspecified\" ] }")); + } +} From fd0a402c99e402c6ef69538f34be515c12dc54d6 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 14:34:05 +0200 Subject: [PATCH 049/885] Polishing. See #3720 Original pull request: #3745. --- .../aggregation/ConditionalOperators.java | 37 ++++++++++--------- .../ConditionalOperatorsUnitTests.java | 4 +- 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java index 95a0290ec2..1979ec78f4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java @@ -236,7 +236,7 @@ private boolean usesCriteriaDefinition() { * * @author Mark Paluch * @see https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ */ public static class IfNull implements AggregationExpression { @@ -252,7 +252,8 @@ private IfNull(Object condition, Object value) { /** * Creates new {@link IfNull}. * - * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}. + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. * @return never {@literal null}. */ public static ThenBuilder ifNull(String fieldReference) { @@ -265,7 +266,7 @@ public static ThenBuilder ifNull(String fieldReference) { * Creates new {@link IfNull}. * * @param expression the expression to check for a {@literal null} value, field reference must not be - * {@literal null}. + * {@literal null}. * @return never {@literal null}. */ public static ThenBuilder ifNull(AggregationExpression expression) { @@ -283,8 +284,8 @@ public Document toDocument(AggregationOperationContext context) { List list = new ArrayList(); - if(condition instanceof Collection) { - for(Object val : ((Collection)this.condition)) { + if (condition instanceof Collection) { + for (Object val : ((Collection) this.condition)) { list.add(mapCondition(val, context)); } } else { @@ -326,14 +327,14 @@ public interface IfNullBuilder { /** * @param fieldReference the field to check for a {@literal null} value, field reference must not be - * {@literal null}. + * {@literal null}. * @return the {@link ThenBuilder} */ ThenBuilder ifNull(String fieldReference); /** * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} - * or empty. + * or empty. * @return the {@link ThenBuilder}. 
*/ ThenBuilder ifNull(AggregationExpression expression); @@ -346,7 +347,8 @@ public interface IfNullBuilder { public interface OrBuilder { /** - * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}. + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. * @return the {@link ThenBuilder} */ ThenBuilder orIfNull(String fieldReference); @@ -365,8 +367,8 @@ public interface ThenBuilder extends OrBuilder { /** * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a - * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB - * representation but must not be {@literal null}. + * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB + * representation but must not be {@literal null}. * @return new instance of {@link IfNull}. */ IfNull then(Object value); @@ -499,7 +501,7 @@ public static Switch switchCases(CaseOperator... conditions) { public static Switch switchCases(List conditions) { Assert.notNull(conditions, "Conditions must not be null!"); - return new Switch(Collections.singletonMap("branches", new ArrayList(conditions))); + return new Switch(Collections. singletonMap("branches", new ArrayList(conditions))); } /** @@ -586,7 +588,7 @@ public interface ThenBuilder { * @author Mark Paluch * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ */ public static class Cond implements AggregationExpression { @@ -847,8 +849,8 @@ public interface ThenBuilder { /** * @param value the value to be used if the condition evaluates {@literal true}. Can be a {@link Document}, a - * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but - * must not be {@literal null}. + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. * @return the {@link OtherwiseBuilder} */ OtherwiseBuilder then(Object value); @@ -873,8 +875,8 @@ public interface OtherwiseBuilder { /** * @param value the value to be used if the condition evaluates {@literal false}. Can be a {@link Document}, a - * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but - * must not be {@literal null}. + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. * @return the {@link Cond} */ Cond otherwise(Object value); @@ -902,8 +904,7 @@ static class ConditionalExpressionBuilder implements WhenBuilder, ThenBuilder, O private @Nullable Object condition; private @Nullable Object thenValue; - private ConditionalExpressionBuilder() { - } + private ConditionalExpressionBuilder() {} /** * Creates a new builder for {@link Cond}. 
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java index 132600cbb5..3b88781616 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java @@ -22,9 +22,11 @@ import org.junit.jupiter.api.Test; /** + * Unit tests for {@link ConditionalOperators}. + * * @author Christoph Strobl */ -public class ConditionalOperatorsUnitTests { +class ConditionalOperatorsUnitTests { @Test // GH-3720 void rendersIfNullWithMultipleConditionalValuesCorrectly() { From df2b2a2f685b46939126c5aa9998d34a8e5270d6 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 10:46:27 +0200 Subject: [PATCH 050/885] Add support for `$integral` aggregation operator. Closes: #3721 Original pull request: #3746. --- .../core/aggregation/ArithmeticOperators.java | 87 +++++++++++++++++-- .../core/spel/MethodReferenceNode.java | 1 + .../ArithmeticOperatorsUnitTests.java | 10 +++ .../SpelExpressionTransformerUnitTests.java | 10 +++ 4 files changed, 100 insertions(+), 8 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 39579fc7b6..4d86bac98e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -216,6 +216,27 @@ public Floor floor() { return usesFieldRef() ? Floor.floorValueOf(fieldReference) : Floor.floorValueOf(expression); } + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral() { + return usesFieldRef() ? Integral.integralOf(fieldReference) : Integral.integralOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral(String unit) { + return integral().unit(unit); + } + /** * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the assoicated * number. @@ -520,8 +541,8 @@ public StdDevSamp stdDevSamp() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the population covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. 
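As an illustration (not part of the change itself), the new operator can be used along these lines:

    // renders { $integral : { input : "$kilowatts", unit : "hour" } }
    ArithmeticOperators.valueOf("kilowatts").integral("hour");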
@@ -532,8 +553,8 @@ public CovariancePop covariancePop(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the population covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. * * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -548,8 +569,8 @@ private CovariancePop covariancePop() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -560,8 +581,8 @@ public CovarianceSamp covarianceSamp(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. * * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -1798,4 +1819,54 @@ protected String getMongoMethod() { return "$derivative"; } } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the approximation for the + * mathematical integral value. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Integral extends AbstractAggregationExpression { + + private Integral(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Integral} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(String fieldReference) { + return new Integral(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Integral} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(AggregationExpression expression) { + return new Integral(Collections.singletonMap("input", expression)); + } + + /** + * Set the unit of measure. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. 
+ */ + public Integral unit(String unit) { + return new Integral(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$integral"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 928869e93b..9be1368caf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -92,6 +92,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("trunc", singleArgRef().forOperator("$trunc")); map.put("round", arrayArgRef().forOperator("$round")); map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); + map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index da03bc5c61..1aab826a23 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -68,4 +68,14 @@ void rendersDerivativeCorrectly() { valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); } + + @Test // GH-3721 + void rendersIntegral() { + assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\" } }")); + } + + @Test // GH-3721 + void rendersIntegralWithUnit() { + assertThat(valueOf("kilowatts").integral("hour").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 2653c52f2d..a0fad05a3c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -994,6 +994,16 @@ void shouldRenderDerivative() { assertThat(transform("derivative(miles, 'hour')")).isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); } + @Test // GH-3721 + public void shouldRenderIntegral() { + assertThat(transform("integral(field)")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\" }}")); + } + + @Test // GH-3721 + public void shouldIntegralWithUnit() { + assertThat(transform("integral(field, 'hour')")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); + } + private Object transform(String expression, 
Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 2a3a4cf030eefdcdd491cd68c532cfffff86bd20 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 14:59:58 +0200 Subject: [PATCH 051/885] Polishing. Fix method order from earlier merges. Add missing Javadoc. Simplify tests. Update documentation. See #3721 Original pull request: #3746. --- .../core/aggregation/ArithmeticOperators.java | 118 ++++++++++++------ .../ArithmeticOperatorsUnitTests.java | 11 +- .../SpelExpressionTransformerUnitTests.java | 51 ++++---- .../reference/aggregation-framework.adoc | 2 +- 4 files changed, 109 insertions(+), 73 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 4d86bac98e..159c6bbeae 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -155,6 +155,46 @@ public Ceil ceil() { return usesFieldRef() ? Ceil.ceilValueOf(fieldReference) : Ceil.ceilValueOf(expression); } + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative() { + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; + } + /** * Creates new {@link AggregationExpression} that ivides the associated number by number referenced via * {@literal fieldReference}. @@ -226,6 +266,21 @@ public Integral integral() { return usesFieldRef() ? Integral.integralOf(fieldReference) : Integral.integralOf(expression); } + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. 
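// Illustrative sketch (not part of the patch): the new $integral entry point, using the same
// "kilowatts" field as the unit tests. The WindowUnit overload lower-cases the unit name and
// delegates to integral(String).
AggregationExpression kilowattHours = ArithmeticOperators.valueOf("kilowatts")
		.integral(SetWindowFieldsOperation.WindowUnits.HOUR);
// expected to render as: { $integral : { input : "$kilowatts", unit : "hour" } }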
+ * @since 3.3 + */ + public Integral integral(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return integral(unit.name().toLowerCase(Locale.ROOT)); + } + /** * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. * @@ -234,6 +289,9 @@ public Integral integral() { * @since 3.3 */ public Integral integral(String unit) { + + Assert.hasText(unit, "Unit must not be empty!"); + return integral().unit(unit); } @@ -618,46 +676,6 @@ public Round roundToPlace(int place) { return round().place(place); } - /** - * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. - * - * @return new instance of {@link Derivative}. - * @since 3.3 - */ - public Derivative derivative() { - return derivative((String) null); - } - - /** - * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. - * - * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, - * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. - * @return new instance of {@link Derivative}. - * @since 3.3 - */ - public Derivative derivative(WindowUnit unit) { - - Assert.notNull(unit, "Window unit must not be null"); - - return derivative(unit.name().toLowerCase(Locale.ROOT)); - } - - /** - * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. - * - * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be - * {@literal null}. - * @return new instance of {@link Derivative}. - * @since 3.3 - */ - public Derivative derivative(@Nullable String unit) { - - Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) - : Derivative.derivativeOf(expression); - return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; - } - private boolean usesFieldRef() { return fieldReference != null; } @@ -1792,16 +1810,36 @@ protected String getMongoMethod() { } } + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the average rate of change + * within the specified window. + * + * @author Christoph Strobl + * @since 3.3 + */ public static class Derivative extends AbstractAggregationExpression { private Derivative(Object value) { super(value); } + /** + * Create a new instance of {@link Derivative} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Derivative}. + */ public static Derivative derivativeOf(String fieldReference) { return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); } + /** + * Create a new instance of {@link Derivative} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Derivative}. 
+ */ public static Derivative derivativeOf(AggregationExpression expression) { return new Derivative(Collections.singletonMap("input", expression)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 1aab826a23..d57363d91c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import java.util.Collections; @@ -66,16 +66,19 @@ void rendersDerivativeCorrectly() { assertThat( valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); + .isEqualTo("{ $derivative: { input: \"$miles\", unit: \"hour\" } }"); } @Test // GH-3721 void rendersIntegral() { - assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\" } }")); + assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\" } }"); } @Test // GH-3721 void rendersIntegralWithUnit() { - assertThat(valueOf("kilowatts").integral("hour").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }")); + assertThat(valueOf("kilowatts").integral(SetWindowFieldsOperation.WindowUnits.HOUR) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index a0fad05a3c..0450e556c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -23,8 +23,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.Person; -import org.springframework.lang.Nullable; /** * Unit tests for {@link SpelExpressionTransformer}. 
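// Illustrative sketch (not part of the patch): the SpEL method reference registered for $integral
// can be exercised the same way these tests do; "transformer" stands for the test class's
// SpelExpressionTransformer instance and "kilowatts" is an invented field name.
Object mapped = transformer.transform("integral(kilowatts, 'hour')", Aggregation.DEFAULT_CONTEXT);
// expected to render as: { "$integral" : { "input" : "$kilowatts", "unit" : "hour" } }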
@@ -152,8 +152,8 @@ void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { @Test // DATAMONGO-774 void shouldRenderConsecutiveOperationsInComplexExpression() { - assertThat(transform("1 + 1 + (1 + 1 + 1) / q")).isEqualTo( - Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); + assertThat(transform("1 + 1 + (1 + 1 + 1) / q")) + .isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); } @Test // DATAMONGO-774 @@ -189,8 +189,7 @@ void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); - assertThat(transform("[0].age + a.c", person)) - .isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); + assertThat(transform("[0].age + a.c", person)).isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); } @Test // DATAMONGO-840 @@ -216,8 +215,7 @@ void shouldRenderMethodReferenceNodeNot() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEquals() { - assertThat(transform("setEquals(a, b)")) - .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("setEquals(a, b)")).isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 @@ -379,8 +377,7 @@ void shouldRenderMethodReferenceTrunc() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeConcat() { - assertThat(transform("concat(a, b, 'c')")) - .isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); + assertThat(transform("concat(a, b, 'c')")).isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); } @Test // DATAMONGO-1530 @@ -400,8 +397,7 @@ void shouldRenderMethodReferenceToUpper() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStrCaseCmp() { - assertThat(transform("strcasecmp(a, b)")) - .isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("strcasecmp(a, b)")).isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 @@ -411,8 +407,7 @@ void shouldRenderMethodReferenceMeta() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeArrayElemAt() { - assertThat(transform("arrayElemAt(a, 10)")) - .isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); + assertThat(transform("arrayElemAt(a, 10)")).isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 @@ -511,15 +506,14 @@ void shouldRenderMethodReferenceMillisecond() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDateToString() { - assertThat(transform("dateToString('%Y-%m-%d', $date)")).isEqualTo( - Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); + assertThat(transform("dateToString('%Y-%m-%d', $date)")) + .isEqualTo(Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCond() { assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( - Document - .parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); + Document.parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); } @Test // DATAMONGO-1530 @@ -633,8 +627,7 @@ void shouldRenderOperationNodeAnd() { @Test // DATAMONGO-1530 void shouldRenderComplexOperationNodeAnd() { assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( - Document - 
.parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + Document.parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); } @Test // DATAMONGO-1530 @@ -644,8 +637,7 @@ void shouldRenderNotCorrectly() { @Test // DATAMONGO-1530 void shouldRenderComplexNotCorrectly() { - assertThat(transform("!(foo > 10)")) - .isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); + assertThat(transform("!(foo > 10)")).isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); } @Test // DATAMONGO-1548 @@ -951,12 +943,14 @@ void shouldRenderRoundWithPlace() { @Test // GH-3712 void shouldRenderCovariancePop() { - assertThat(transform("covariancePop(field1, field2)")).isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); + assertThat(transform("covariancePop(field1, field2)")) + .isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); } @Test // GH-3712 void shouldRenderCovarianceSamp() { - assertThat(transform("covarianceSamp(field1, field2)")).isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); + assertThat(transform("covarianceSamp(field1, field2)")) + .isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); } @Test // GH-3715 @@ -988,20 +982,21 @@ void rendersShiftWithDefault() { .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); } - @Nullable @Test // GH-3716 void shouldRenderDerivative() { - assertThat(transform("derivative(miles, 'hour')")).isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); + assertThat(transform("derivative(miles, 'hour')")) + .isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); } @Test // GH-3721 - public void shouldRenderIntegral() { + void shouldRenderIntegral() { assertThat(transform("integral(field)")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\" }}")); } @Test // GH-3721 - public void shouldIntegralWithUnit() { - assertThat(transform("integral(field, 'hour')")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); + void shouldRenderIntegralWithUnit() { + assertThat(transform("integral(field, 'hour')")) + .isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); } private Object transform(String expression, Object... 
params) { diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 2624e6c27e..9b00811a7b 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` From ec16b873b7f52477812c92b7504ec8a5306ede2c Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 28 Jul 2021 08:03:38 +0200 Subject: [PATCH 052/885] Add support for `$degreesToRadians` aggregation operator. Closes: #3714 Original pull request: #3755. --- .../core/aggregation/ConvertOperators.java | 58 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../ConvertOperatorsUnitTests.java | 7 +++ .../SpelExpressionTransformerUnitTests.java | 5 ++ 4 files changed, 71 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java index 315a463e1f..b34933444a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -231,6 +231,16 @@ public ToString convertToString() { return ToString.toString(valueObject()); } + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians.\ + * + * @return new instance of {@link DegreesToRadians}. + * @since 3.3 + */ + public DegreesToRadians convertDegreesToRadians() { + return DegreesToRadians.degreesToRadians(valueObject()); + } + private Convert createConvert() { return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression); } @@ -692,4 +702,52 @@ protected String getMongoMethod() { return "$toString"; } } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DegreesToRadians extends AbstractAggregationExpression { + + private DegreesToRadians(Object value) { + super(value); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. 
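// Illustrative sketch (not part of the patch): converting a field holding degrees to radians with
// the new operator, mirroring the "angle_a" field used in the unit tests below.
AggregationExpression radians = ConvertOperators.valueOf("angle_a").convertDegreesToRadians();
// expected to render as: { $degreesToRadians : "$angle_a" }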
+ */ + public static DegreesToRadians degreesToRadiansOf(String fieldName) { + return degreesToRadians(Fields.field(fieldName)); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) { + return degreesToRadians(expression); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadians(Object value) { + return new DegreesToRadians(value); + } + + @Override + protected String getMongoMethod() { + return "$degreesToRadians"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 9be1368caf..f0799a1af6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -199,6 +199,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("toLong", singleArgRef().forOperator("$toLong")); map.put("toObjectId", singleArgRef().forOperator("$toObjectId")); map.put("toString", singleArgRef().forOperator("$toString")); + map.put("degreesToRadians", singleArgRef().forOperator("$degreesToRadians")); FUNCTIONS = Collections.unmodifiableMap(map); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java index a44c932723..c794cf8102 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java @@ -222,4 +222,11 @@ public void toStringUsingExpression() { assertThat(ConvertOperators.valueOf(EXPRESSION).convertToString().toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $toString: " + EXPRESSION_STRING + " } ")); } + + @Test // GH-3714 + void degreesToRadiansUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("angle_a").convertDegreesToRadians().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $degreesToRadians : \"$angle_a\"}")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 0450e556c4..9cad6cbb15 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -941,6 +941,11 @@ void shouldRenderRoundWithPlace() { assertThat(transform("round(field, 
2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); } + @Test // GH-3714 + void shouldRenderDegreesToRadians() { + assertThat(transform("degreesToRadians(angle_a)")).isEqualTo(Document.parse("{ \"$degreesToRadians\" : \"$angle_a\"}")); + } + @Test // GH-3712 void shouldRenderCovariancePop() { assertThat(transform("covariancePop(field1, field2)")) From 0db47169cffca0ca3af462c6f61d9178538cfad9 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 28 Jul 2021 09:40:52 +0200 Subject: [PATCH 053/885] Add support for `$sin` and `$sinh` aggregation operators. Closes: #3728 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 246 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 28 ++ .../SpelExpressionTransformerUnitTests.java | 10 + 4 files changed, 286 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 159c6bbeae..db328338e3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -31,6 +31,7 @@ import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -676,6 +677,48 @@ public Round roundToPlace(int place) { return round().place(place); } + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin() { + return sin(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given {@link AngularDimension unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin(AngularDimension unit) { + return usesFieldRef() ? Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sinh sinh() { + return sinh(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sinh sinh(AngularDimension unit) { + return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -1907,4 +1950,207 @@ protected String getMongoMethod() { return "$integral"; } } + + /** + * The unit of measure for computations that operate upon angles. + * + * @author Christoph Strobl + * @since 3.3 + */ + public enum AngularDimension { + RADIANS, DEGREES + } + + /** + * An {@link AggregationExpression expression} that calculates the sine of a value that is measured in radians. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sin extends AbstractAggregationExpression { + + private Sin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularDimension#RADIANS radians}. + *

+		 * Use {@code sinOf("angle", DEGREES)} as shortcut for <pre>{ $sin : { $degreesToRadians : "$angle" } }</pre>
. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference) { + return sinOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference, AngularDimension unit) { + return sin(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression) { + return sinOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression, AngularDimension unit) { + return sin(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value) { + return sin(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sin(value); + } + + @Override + protected String getMongoMethod() { + return "$sin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sinh extends AbstractAggregationExpression { + + private Sinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. 
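// Illustrative sketch (not part of the patch): $sin over a field stored in degrees; passing
// AngularDimension.DEGREES makes the builder wrap the field in $degreesToRadians first, as the
// GH-3728 unit tests verify.
AggregationExpression sine = ArithmeticOperators.valueOf("angle").sin(AngularDimension.DEGREES);
// expected to render as: { $sin : { $degreesToRadians : "$angle" } }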
+ */ + public static Sinh sinhOf(String fieldReference) { + return sinhOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularDimension unit}. + *

+		 * Use {@code sinhOf("angle", DEGREES)} as shortcut for <pre>{ $sinh : { $degreesToRadians : "$angle" } }</pre>
. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference, AngularDimension unit) { + return sinh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + *

+ * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression) { + return sinhOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression, AngularDimension unit) { + return sinh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinh(Object value) { + return sinh(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. 
+ */ + public static Sinh sinh(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sinh(value); + } + + @Override + protected String getMongoMethod() { + return "$sinh"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index f0799a1af6..9ee12be1eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -93,6 +93,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("round", arrayArgRef().forOperator("$round")); map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); + map.put("sin", singleArgRef().forOperator("$sin")); + map.put("sinh", singleArgRef().forOperator("$sinh")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index d57363d91c..cc32a94323 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -81,4 +81,32 @@ void rendersIntegralWithUnit() { .toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }"); } + + @Test // GH-3728 + void rendersSin() { + + assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sin : \"$angle\" }")); + } + + @Test // GH-3728 + void rendersSinWithValueInDegrees() { + + assertThat(valueOf("angle").sin(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sin : { $degreesToRadians : \"$angle\" } }")); + } + + @Test // GH-3728 + void rendersSinh() { + + assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sinh : \"$angle\" }")); + } + + @Test // GH-3728 + void rendersSinhWithValueInDegrees() { + + assertThat(valueOf("angle").sinh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sinh : { $degreesToRadians : \"$angle\" } }")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 9cad6cbb15..e250241558 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1004,6 +1004,16 @@ void shouldRenderIntegralWithUnit() { .isEqualTo(Document.parse("{ \"$integral\" : { 
\"input\" : \"$field\", \"unit\" : \"hour\" }}")); } + @Test // GH-3728 + void shouldRenderSin() { + assertThat(transform("sin(angle)")).isEqualTo(Document.parse("{ \"$sin\" : \"$angle\"}")); + } + + @Test // GH-3728 + void shouldRenderSinh() { + assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 73d5886aae13082d24b7f49db91da322da509952 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 28 Jul 2021 09:54:05 +0200 Subject: [PATCH 054/885] Add support for `$tan` and `$tanh` aggregation operators. Closes: #3730 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 251 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 29 ++ .../SpelExpressionTransformerUnitTests.java | 10 + 4 files changed, 292 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index db328338e3..e26e41f651 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -719,6 +719,51 @@ public Sinh sinh(AngularDimension unit) { return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tan tan() { + return tan(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given + * {@link AngularDimension unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tan tan(AngularDimension unit) { + return usesFieldRef() ? Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tanh tanh() { + return tanh(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tanh tanh(AngularDimension unit) { + return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -2153,4 +2198,210 @@ protected String getMongoMethod() { return "$sinh"; } } + + /** + * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tan extends AbstractAggregationExpression { + + private Tan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularDimension#RADIANS radians}. + *

+		 * Use {@code tanOf("angle", DEGREES)} as shortcut for
+		 *
+		 * <pre>
+		 * { $tan : { $degreesToRadians : "$angle" } }
+		 * </pre>
+ * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference) { + return tanOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference, AngularDimension unit) { + return tan(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression) { + return tanOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression, AngularDimension unit) { + return tan(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value) { + return tan(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tan(value); + } + + @Override + protected String getMongoMethod() { + return "$tan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tanh extends AbstractAggregationExpression { + + private Tanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tanh}. 
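// Illustrative sketch (not part of the patch): $tan with the default radians unit; using
// AngularDimension.DEGREES instead would wrap the field in $degreesToRadians.
AggregationExpression tangent = ArithmeticOperators.valueOf("angle").tan();
// expected to render as: { $tan : "$angle" }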
+ */ + public static Tanh tanhOf(String fieldReference) { + return tanhOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularDimension unit}. + *

+		 * Use {@code tanhOf("angle", DEGREES)} as shortcut for
+		 *
+		 * <pre>
+		 * { $tanh : { $degreesToRadians : "$angle" } }
+		 * </pre>
+ * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(String fieldReference, AngularDimension unit) { + return tanh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + *

+ * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression) { + return tanhOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression, AngularDimension unit) { + return tanh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanh(Object value) { + return tanh(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanh(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tanh(value); + } + + @Override + protected String getMongoMethod() { + return "$tanh"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 9ee12be1eb..a2d011d6ad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -95,6 +95,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); map.put("sin", singleArgRef().forOperator("$sin")); map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("tan", singleArgRef().forOperator("$tan")); + map.put("tanh", singleArgRef().forOperator("$tanh")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index cc32a94323..9a77d093c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -109,4 +109,33 @@ void rendersSinhWithValueInDegrees() { assertThat(valueOf("angle").sinh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $sinh : { $degreesToRadians : \"$angle\" } }")); } + + @Test // GH-3730 + void rendersTan() { + + assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tan : \"$angle\" }")); + } + + @Test // GH-3730 + void rendersTanWithValueInDegrees() { + + assertThat(valueOf("angle").tan(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tan : { $degreesToRadians : \"$angle\" } }")); + } + + @Test // GH-3730 + void rendersTanh() { + + assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tanh : \"$angle\" }")); + } + + @Test // GH-3730 + void rendersTanhWithValueInDegrees() { + + assertThat(valueOf("angle").tanh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tanh : { $degreesToRadians : \"$angle\" } }")); + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index e250241558..cc59a91700 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1014,6 +1014,16 @@ void shouldRenderSinh() { 
assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); } + @Test // GH-3730 + void shouldRenderTan() { + assertThat(transform("tan(angle)")).isEqualTo(Document.parse("{ \"$tan\" : \"$angle\"}")); + } + + @Test // GH-3730 + void shouldRenderTanh() { + assertThat(transform("tanh(angle)")).isEqualTo(Document.parse("{ \"$tanh\" : \"$angle\"}")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From c4c6267d91b89fd299f47a1a8604d86e9e87e53d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 28 Jul 2021 10:04:29 +0200 Subject: [PATCH 055/885] Add support for `$cos` and `$cosh` aggregation operators. Closes: #3710 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 251 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 28 ++ .../SpelExpressionTransformerUnitTests.java | 10 + 4 files changed, 291 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index e26e41f651..4de258b4eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -719,6 +719,51 @@ public Sinh sinh(AngularDimension unit) { return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cos cos() { + return cos(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given + * {@link AngularDimension unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cos cos(AngularDimension unit) { + return usesFieldRef() ? Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cosh cosh() { + return cosh(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cosh cosh(AngularDimension unit) { + return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); + } + /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in * {@link AngularDimension#RADIANS radians}. @@ -2199,6 +2244,212 @@ protected String getMongoMethod() { } } + /** + * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. 
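// Illustrative sketch (not part of the patch): $cos follows the same pattern as $sin and $tan;
// with AngularDimension.DEGREES the value is first converted via $degreesToRadians.
AggregationExpression cosine = ArithmeticOperators.valueOf("angle").cos(AngularDimension.DEGREES);
// expected to render as: { $cos : { $degreesToRadians : "$angle" } }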
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cos extends AbstractAggregationExpression { + + private Cos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularDimension#RADIANS radians}. + *

+		 * Use {@code cosOf("angle", DEGREES)} as shortcut for
+		 *
+		 * <pre class="code">
+		 * { $cos : { $degreesToRadians : "$angle" } }
+		 * </pre>
+ * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference) { + return cosOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference, AngularDimension unit) { + return cos(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression) { + return cosOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression, AngularDimension unit) { + return cos(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value) { + return cos(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cos(value); + } + + @Override + protected String getMongoMethod() { + return "$cos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cosh extends AbstractAggregationExpression { + + private Cosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cosh}. 
+ */ + public static Cosh coshOf(String fieldReference) { + return coshOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularDimension unit}. + *

+		 * Use {@code coshOf("angle", DEGREES)} as shortcut for
+		 *
+		 * <pre class="code">
+		 * { $cosh : { $degreesToRadians : "$angle" } }
+		 * </pre>
+ * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference, AngularDimension unit) { + return cosh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + *

+ * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression) { + return coshOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression, AngularDimension unit) { + return cosh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value) { + return cosh(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cosh(value); + } + + @Override + protected String getMongoMethod() { + return "$cosh"; + } + } + /** * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. 
* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index a2d011d6ad..1efe94c757 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -95,6 +95,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); map.put("sin", singleArgRef().forOperator("$sin")); map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("cos", singleArgRef().forOperator("$cos")); + map.put("cosh", singleArgRef().forOperator("$cosh")); map.put("tan", singleArgRef().forOperator("$tan")); map.put("tanh", singleArgRef().forOperator("$tanh")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 9a77d093c4..55d1647568 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -110,6 +110,34 @@ void rendersSinhWithValueInDegrees() { .isEqualTo(Document.parse("{ $sinh : { $degreesToRadians : \"$angle\" } }")); } + @Test // GH-3710 + void rendersCos() { + + assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cos : \"$angle\" }")); + } + + @Test // GH-3710 + void rendersCosWithValueInDegrees() { + + assertThat(valueOf("angle").cos(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cos : { $degreesToRadians : \"$angle\" } }")); + } + + @Test // GH-3710 + void rendersCosh() { + + assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cosh : \"$angle\" }")); + } + + @Test // GH-3710 + void rendersCoshWithValueInDegrees() { + + assertThat(valueOf("angle").cosh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cosh : { $degreesToRadians : \"$angle\" } }")); + } + @Test // GH-3730 void rendersTan() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index cc59a91700..e30f7f9fb9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1014,6 +1014,16 @@ void shouldRenderSinh() { assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); } + @Test // GH-3710 + void shouldRenderCos() { + assertThat(transform("cos(angle)")).isEqualTo(Document.parse("{ \"$cos\" : \"$angle\"}")); + } + + @Test // GH-3710 + void shouldRenderCosh() { + assertThat(transform("cosh(angle)")).isEqualTo(Document.parse("{ \"$cosh\" : \"$angle\"}")); + } + @Test // GH-3730 
void shouldRenderTan() { assertThat(transform("tan(angle)")).isEqualTo(Document.parse("{ \"$tan\" : \"$angle\"}")); From df0372eee1368a5b0c03de1ad3bda9e1aaecf9e7 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 16:11:53 +0200 Subject: [PATCH 056/885] Polishing. Rename AngularDimension to AngularUnit. Tweak Javadoc. Simplify tests. Update reference docs. See: #3710, #3714, #3728, #3730 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 244 ++++++----- .../core/aggregation/ConvertOperators.java | 3 +- .../ArithmeticOperatorsUnitTests.java | 36 +- .../SpelExpressionTransformerUnitTests.java | 402 +++++++++--------- .../reference/aggregation-framework.adoc | 4 +- 5 files changed, 355 insertions(+), 334 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 4de258b4eb..7896486abf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -678,34 +678,37 @@ public Round roundToPlace(int place) { } /** - * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Sin sin() { - return sin(AngularDimension.RADIANS); + return sin(AngularUnit.RADIANS); } /** - * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given {@link AngularDimension unit}. + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given + * {@link AngularUnit unit}. * * @param unit the unit of measure. * @return new instance of {@link Sin}. * @since 3.3 */ - public Sin sin(AngularDimension unit) { + public Sin sin(AngularUnit unit) { return usesFieldRef() ? Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); } /** - * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Sinh sinh() { - return sinh(AngularDimension.RADIANS); + return sinh(AngularUnit.RADIANS); } /** @@ -715,42 +718,42 @@ public Sinh sinh() { * @return new instance of {@link Sin}. * @since 3.3 */ - public Sinh sinh(AngularDimension unit) { + public Sinh sinh(AngularUnit unit) { return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Cos cos() { - return cos(AngularDimension.RADIANS); + return cos(AngularUnit.RADIANS); } /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. 
* * @param unit the unit of measure. * @return new instance of {@link Sin}. * @since 3.3 */ - public Cos cos(AngularDimension unit) { + public Cos cos(AngularUnit unit) { return usesFieldRef() ? Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Cosh cosh() { - return cosh(AngularDimension.RADIANS); + return cosh(AngularUnit.RADIANS); } /** @@ -760,42 +763,42 @@ public Cosh cosh() { * @return new instance of {@link Sin}. * @since 3.3 */ - public Cosh cosh(AngularDimension unit) { + public Cosh cosh(AngularUnit unit) { return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Tan tan() { - return tan(AngularDimension.RADIANS); + return tan(AngularUnit.RADIANS); } /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param unit the unit of measure. * @return new instance of {@link Sin}. * @since 3.3 */ - public Tan tan(AngularDimension unit) { + public Tan tan(AngularUnit unit) { return usesFieldRef() ? Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Tanh tanh() { - return tanh(AngularDimension.RADIANS); + return tanh(AngularUnit.RADIANS); } /** @@ -805,7 +808,7 @@ public Tanh tanh() { * @return new instance of {@link Sin}. * @since 3.3 */ - public Tanh tanh(AngularDimension unit) { + public Tanh tanh(AngularUnit unit) { return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); } @@ -2047,7 +2050,7 @@ protected String getMongoMethod() { * @author Christoph Strobl * @since 3.3 */ - public enum AngularDimension { + public enum AngularUnit { RADIANS, DEGREES } @@ -2065,76 +2068,82 @@ private Sin(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. *

-		 * Use {@code sinhOf("angle", DEGREES)} as shortcut for
-		 * <pre class="code">{ $sinh : { $degreesToRadians : "$angle" } }</pre>.
+		 * Use {@code sinhOf("angle", DEGREES)} as shortcut for
+		 *
+		 * <pre class="code">
+		 * { $sinh : { $degreesToRadians : "$angle" } }
+		 * </pre>
+ * + * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sin sinOf(String fieldReference) { - return sinOf(fieldReference, AngularDimension.RADIANS); + return sinOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sin sinOf(String fieldReference, AngularDimension unit) { + public static Sin sinOf(String fieldReference, AngularUnit unit) { return sin(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sin sinOf(AggregationExpression expression) { - return sinOf(expression, AngularDimension.RADIANS); + return sinOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sin sinOf(AggregationExpression expression, AngularDimension unit) { + public static Sin sinOf(AggregationExpression expression, AngularUnit unit) { return sin(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @return new instance of {@link Sin}. */ public static Sin sin(Object value) { - return sin(value, AngularDimension.RADIANS); + return sin(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sin sin(Object value, AngularDimension unit) { + public static Sin sin(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Sin(value); @@ -2148,7 +2157,7 @@ protected String getMongoMethod() { /** * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. 
* * @author Christoph Strobl * @since 3.3 @@ -2161,78 +2170,85 @@ private Sinh(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sinh sinhOf(String fieldReference) { - return sinhOf(fieldReference, AngularDimension.RADIANS); + return sinhOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. *

-		 * Use {@code sinhOf("angle", DEGREES)} as shortcut for
-		 * <pre class="code">{ $sinh : { $degreesToRadians : "$angle" } }</pre>.
+		 * Use {@code sinhOf("angle", DEGREES)} as shortcut for
+		 *
+		 * <pre class="code">
+		 * { $sinh : { $degreesToRadians : "$angle" } }
+		 * </pre>
+ * + * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sinh sinhOf(String fieldReference, AngularDimension unit) { + public static Sinh sinhOf(String fieldReference, AngularUnit unit) { return sinh(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. *

- * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sinh sinhOf(AggregationExpression expression) { - return sinhOf(expression, AngularDimension.RADIANS); + return sinhOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sinh sinhOf(AggregationExpression expression, AngularDimension unit) { + public static Sinh sinhOf(AggregationExpression expression, AngularUnit unit) { return sinh(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @return new instance of {@link Sin}. */ public static Sinh sinh(Object value) { - return sinh(value, AngularDimension.RADIANS); + return sinh(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sinh sinh(Object value, AngularDimension unit) { + public static Sinh sinh(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Sinh(value); @@ -2258,82 +2274,82 @@ private Cos(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. *

* Use {@code cosOf("angle", DEGREES)} as shortcut for - * + * *

 		 * <pre class="code">
 		 * { $cos : { $degreesToRadians : "$angle" } }
 		 * </pre>
- * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Cos}. */ public static Cos cosOf(String fieldReference) { - return cosOf(fieldReference, AngularDimension.RADIANS); + return cosOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cos}. */ - public static Cos cosOf(String fieldReference, AngularDimension unit) { + public static Cos cosOf(String fieldReference, AngularUnit unit) { return cos(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Cos}. */ public static Cos cosOf(AggregationExpression expression) { - return cosOf(expression, AngularDimension.RADIANS); + return cosOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cos}. */ - public static Cos cosOf(AggregationExpression expression, AngularDimension unit) { + public static Cos cosOf(AggregationExpression expression, AngularUnit unit) { return cos(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @return new instance of {@link Cos}. */ public static Cos cos(Object value) { - return cos(value, AngularDimension.RADIANS); + return cos(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cos}. */ - public static Cos cos(Object value, AngularDimension unit) { + public static Cos cos(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Cos(value); @@ -2347,7 +2363,7 @@ protected String getMongoMethod() { /** * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. 
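
The same rendering is also available through the static factories; an editorial sketch (not part of the patch), using the renamed `AngularUnit` enum and an illustrative field name.

    import org.bson.Document;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;
    import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.AngularUnit;
    import org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.Cosh;

    class CoshFactorySketch {

        Document render() {
            // expected: { $cosh : { $degreesToRadians : "$angle" } }
            return Cosh.coshOf("angle", AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT);
        }
    }
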
* * @author Christoph Strobl * @since 3.3 @@ -2360,38 +2376,38 @@ private Cosh(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Cosh}. */ public static Cosh coshOf(String fieldReference) { - return coshOf(fieldReference, AngularDimension.RADIANS); + return coshOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. *

* Use {@code coshOf("angle", DEGREES)} as shortcut for - * + * *

 		 * <pre class="code">
 		 * { $cosh : { $degreesToRadians : "$angle" } }
 		 * </pre>
- * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cosh}. */ - public static Cosh coshOf(String fieldReference, AngularDimension unit) { + public static Cosh coshOf(String fieldReference, AngularUnit unit) { return cosh(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. *

* Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. @@ -2400,45 +2416,45 @@ public static Cosh coshOf(String fieldReference, AngularDimension unit) { * @return new instance of {@link Cosh}. */ public static Cosh coshOf(AggregationExpression expression) { - return coshOf(expression, AngularDimension.RADIANS); + return coshOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cosh}. */ - public static Cosh coshOf(AggregationExpression expression, AngularDimension unit) { + public static Cosh coshOf(AggregationExpression expression, AngularUnit unit) { return cosh(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @return new instance of {@link Cosh}. */ public static Cosh cosh(Object value) { - return cosh(value, AngularDimension.RADIANS); + return cosh(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cosh}. */ - public static Cosh cosh(Object value, AngularDimension unit) { + public static Cosh cosh(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Cosh(value); @@ -2464,82 +2480,82 @@ private Tan(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. *

* Use {@code tanOf("angle", DEGREES)} as shortcut for - * + * *

 		 * <pre class="code">
 		 * { $tan : { $degreesToRadians : "$angle" } }
 		 * </pre>
- * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Tan}. */ public static Tan tanOf(String fieldReference) { - return tanOf(fieldReference, AngularDimension.RADIANS); + return tanOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tan}. */ - public static Tan tanOf(String fieldReference, AngularDimension unit) { + public static Tan tanOf(String fieldReference, AngularUnit unit) { return tan(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Tan}. */ public static Tan tanOf(AggregationExpression expression) { - return tanOf(expression, AngularDimension.RADIANS); + return tanOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tan}. */ - public static Tan tanOf(AggregationExpression expression, AngularDimension unit) { + public static Tan tanOf(AggregationExpression expression, AngularUnit unit) { return tan(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @return new instance of {@link Tan}. */ public static Tan tan(Object value) { - return tan(value, AngularDimension.RADIANS); + return tan(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tan}. */ - public static Tan tan(Object value, AngularDimension unit) { + public static Tan tan(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Tan(value); @@ -2553,7 +2569,7 @@ protected String getMongoMethod() { /** * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. 
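
Since `tan` and `tanh` were also registered as SpEL method references earlier in this series, the operator can be reached from an expression string as well; an editorial sketch (not part of the patch), with illustrative field and alias names.

    import org.springframework.data.mongodb.core.aggregation.Aggregation;
    import org.springframework.data.mongodb.core.aggregation.ProjectionOperation;

    class TanhViaSpelSketch {

        ProjectionOperation project() {
            // expected to render: { $project : { tanhAngle : { $tanh : "$angle" } } }
            return Aggregation.project().andExpression("tanh(angle)").as("tanhAngle");
        }
    }
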
* * @author Christoph Strobl * @since 3.3 @@ -2566,38 +2582,38 @@ private Tanh(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Tanh}. */ public static Tanh tanhOf(String fieldReference) { - return tanhOf(fieldReference, AngularDimension.RADIANS); + return tanhOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. *

* Use {@code tanhOf("angle", DEGREES)} as shortcut for - * + * *

 		 * <pre class="code">
 		 * { $tanh : { $degreesToRadians : "$angle" } }
 		 * </pre>
- * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tanh}. */ - public static Tanh tanhOf(String fieldReference, AngularDimension unit) { + public static Tanh tanhOf(String fieldReference, AngularUnit unit) { return tanh(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. *

* Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. @@ -2606,45 +2622,45 @@ public static Tanh tanhOf(String fieldReference, AngularDimension unit) { * @return new instance of {@link Tanh}. */ public static Tanh tanhOf(AggregationExpression expression) { - return tanhOf(expression, AngularDimension.RADIANS); + return tanhOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tanh}. */ - public static Tanh tanhOf(AggregationExpression expression, AngularDimension unit) { + public static Tanh tanhOf(AggregationExpression expression, AngularUnit unit) { return tanh(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @return new instance of {@link Tanh}. */ public static Tanh tanh(Object value) { - return tanh(value, AngularDimension.RADIANS); + return tanh(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tanh}. */ - public static Tanh tanh(Object value, AngularDimension unit) { + public static Tanh tanh(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Tanh(value); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java index b34933444a..637ebd8d8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -232,7 +232,8 @@ public ToString convertToString() { } /** - * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians.\ + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to + * radians. * * @return new instance of {@link DegreesToRadians}. 
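
For completeness, the `DEGREES` shortcuts are equivalent to composing the conversion by hand; an editorial sketch (not part of the patch), based on the `ConvertOperators.valueOf("angle").degreesToRadians()` form referenced in the Javadoc above, with an illustrative field name and sketch class.

    import static org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.Tanh.tanhOf;

    import org.bson.Document;
    import org.springframework.data.mongodb.core.aggregation.Aggregation;
    import org.springframework.data.mongodb.core.aggregation.ConvertOperators;

    class DegreesToRadiansSketch {

        Document render() {
            // equivalent to tanhOf("angle", AngularUnit.DEGREES):
            // expected: { $tanh : { $degreesToRadians : "$angle" } }
            return tanhOf(ConvertOperators.valueOf("angle").degreesToRadians())
                    .toDocument(Aggregation.DEFAULT_CONTEXT);
        }
    }
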
* @since 3.3 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 55d1647568..7cde7cd1c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -86,84 +86,84 @@ void rendersIntegralWithUnit() { void rendersSin() { assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sin : \"$angle\" }")); + .isEqualTo("{ $sin : \"$angle\" }"); } @Test // GH-3728 void rendersSinWithValueInDegrees() { - assertThat(valueOf("angle").sin(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sin : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").sin(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sin : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3728 void rendersSinh() { assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sinh : \"$angle\" }")); + .isEqualTo("{ $sinh : \"$angle\" }"); } @Test // GH-3728 void rendersSinhWithValueInDegrees() { - assertThat(valueOf("angle").sinh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sinh : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3710 void rendersCos() { assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cos : \"$angle\" }")); + .isEqualTo("{ $cos : \"$angle\" }"); } @Test // GH-3710 void rendersCosWithValueInDegrees() { - assertThat(valueOf("angle").cos(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cos : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").cos(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cos : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3710 void rendersCosh() { assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cosh : \"$angle\" }")); + .isEqualTo("{ $cosh : \"$angle\" }"); } @Test // GH-3710 void rendersCoshWithValueInDegrees() { - assertThat(valueOf("angle").cosh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cosh : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").cosh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cosh : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3730 void rendersTan() { assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tan : \"$angle\" }")); + .isEqualTo("{ $tan : \"$angle\" }"); } @Test // GH-3730 void rendersTanWithValueInDegrees() { - assertThat(valueOf("angle").tan(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tan : { $degreesToRadians : \"$angle\" } }")); + 
assertThat(valueOf("angle").tan(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tan : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3730 void rendersTanh() { assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tanh : \"$angle\" }")); + .isEqualTo("{ $tanh : \"$angle\" }"); } @Test // GH-3730 void rendersTanhWithValueInDegrees() { - assertThat(valueOf("angle").tanh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tanh : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index e30f7f9fb9..193ffb520d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; @@ -53,21 +53,21 @@ void beforeEach() { @Test // DATAMONGO-774 void shouldRenderConstantExpression() { - assertThat(transform("1")).isEqualTo("1"); - assertThat(transform("-1")).isEqualTo("-1"); - assertThat(transform("1.0")).isEqualTo("1.0"); - assertThat(transform("-1.0")).isEqualTo("-1.0"); - assertThat(transform("null")).isNull(); + assertThat(transformValue("1")).isEqualTo("1"); + assertThat(transformValue("-1")).isEqualTo("-1"); + assertThat(transformValue("1.0")).isEqualTo("1.0"); + assertThat(transformValue("-1.0")).isEqualTo("-1.0"); + assertThat(transformValue("null")).isNull(); } @Test // DATAMONGO-774 void shouldSupportKnownOperands() { - assertThat(transform("a + b")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a - b")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a * b")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a / b")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a % b")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a + b")).isEqualTo("{ \"$add\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a - b")).isEqualTo("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a * b")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a / b")).isEqualTo("{ \"$divide\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a % b")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-774 @@ -77,35 +77,35 @@ void shouldThrowExceptionOnUnknownOperand() { @Test // DATAMONGO-774 void shouldRenderSumExpression() { - assertThat(transform("a + 1")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 1]}")); + assertThat(transform("a + 1")).isEqualTo("{ \"$add\" : [ \"$a\" , 1]}"); } @Test // DATAMONGO-774 void shouldRenderFormula() { - assertThat(transform("(netPrice + surCharge) * taxrate + 
42")).isEqualTo(Document.parse( - "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); + assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } @Test // DATAMONGO-774 void shouldRenderFormulaInCurlyBrackets() { - assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo(Document.parse( - "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); + assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } @Test // DATAMONGO-774 void shouldRenderFieldReference() { - assertThat(transform("foo")).isEqualTo("$foo"); - assertThat(transform("$foo")).isEqualTo("$foo"); + assertThat(transformValue("foo")).isEqualTo("$foo"); + assertThat(transformValue("$foo")).isEqualTo("$foo"); } @Test // DATAMONGO-774 void shouldRenderNestedFieldReference() { - assertThat(transform("foo.bar")).isEqualTo("$foo.bar"); - assertThat(transform("$foo.bar")).isEqualTo("$foo.bar"); + assertThat(transformValue("foo.bar")).isEqualTo("$foo.bar"); + assertThat(transformValue("$foo.bar")).isEqualTo("$foo.bar"); } @Test // DATAMONGO-774 @@ -113,52 +113,52 @@ void shouldRenderNestedFieldReference() { void shouldRenderNestedIndexedFieldReference() { // TODO add support for rendering nested indexed field references - assertThat(transform("foo[3].bar")).isEqualTo("$foo[3].bar"); + assertThat(transformValue("foo[3].bar")).isEqualTo("$foo[3].bar"); } @Test // DATAMONGO-774 void shouldRenderConsecutiveOperation() { - assertThat(transform("1 + 1 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , 1]}")); + assertThat(transform("1 + 1 + 1")).isEqualTo("{ \"$add\" : [ 1 , 1 , 1]}"); } @Test // DATAMONGO-774 void shouldRenderComplexExpression0() { assertThat(transform("-(1 + q)")) - .isEqualTo(Document.parse("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}")); + .isEqualTo("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}"); } @Test // DATAMONGO-774 void shouldRenderComplexExpression1() { - assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo(Document.parse( - "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}")); + assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo( + "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}"); } @Test // DATAMONGO-774 void shouldRenderComplexExpression2() { - assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo(Document.parse( - "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}")); + assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo( + "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}"); } @Test // DATAMONGO-774 void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { - assertThat(transform("-4 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ -4 , 1]}")); - assertThat(transform("1 + -4")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , -4]}")); + assertThat(transform("-4 + 1")).isEqualTo("{ \"$add\" : [ -4 , 1]}"); + assertThat(transform("1 + -4")).isEqualTo("{ \"$add\" : [ 1 , -4]}"); } @Test // DATAMONGO-774 void 
shouldRenderConsecutiveOperationsInComplexExpression() { assertThat(transform("1 + 1 + (1 + 1 + 1) / q")) - .isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); + .isEqualTo("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}"); } @Test // DATAMONGO-774 void shouldRenderParameterExpressionResults() { - assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 2 , 3]}")); + assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo("{ \"$add\" : [ 1 , 2 , 3]}"); } @Test // DATAMONGO-774 @@ -189,852 +189,856 @@ void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); - assertThat(transform("[0].age + a.c", person)).isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); + assertThat(transform("[0].age + a.c", person)).isEqualTo("{ \"$add\" : [ 10 , \"$a.c\"] }"); } @Test // DATAMONGO-840 void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { - assertThat(transform("a.b + a.c")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}")); + assertThat(transform("a.b + a.c")).isEqualTo("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeAnd() { - assertThat(transform("and(a, b)")).isEqualTo(Document.parse("{ \"$and\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("and(a, b)")).isEqualTo("{ \"$and\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeOr() { - assertThat(transform("or(a, b)")).isEqualTo(Document.parse("{ \"$or\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("or(a, b)")).isEqualTo("{ \"$or\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeNot() { - assertThat(transform("not(a)")).isEqualTo(Document.parse("{ \"$not\" : [ \"$a\"]}")); + assertThat(transform("not(a)")).isEqualTo("{ \"$not\" : [ \"$a\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEquals() { - assertThat(transform("setEquals(a, b)")).isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("setEquals(a, b)")).isEqualTo("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEqualsForArrays() { assertThat(transform("setEquals(new int[]{1,2,3}, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { assertThat(transform("setEquals(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSetIntersection() { assertThat(transform("setIntersection(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSetUnion() { assertThat(transform("setUnion(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSeDifference() { assertThat(transform("setDifference(a, new int[]{4,5,6})")) - 
.isEqualTo(Document.parse("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSetIsSubset() { assertThat(transform("setIsSubset(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAnyElementTrue() { - assertThat(transform("anyElementTrue(a)")).isEqualTo(Document.parse("{ \"$anyElementTrue\" : [ \"$a\"]}")); + assertThat(transform("anyElementTrue(a)")).isEqualTo("{ \"$anyElementTrue\" : [ \"$a\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAllElementsTrue() { assertThat(transform("allElementsTrue(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCmp() { - assertThat(transform("cmp(a, 250)")).isEqualTo(Document.parse("{ \"$cmp\" : [ \"$a\" , 250]}")); + assertThat(transform("cmp(a, 250)")).isEqualTo("{ \"$cmp\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceEq() { - assertThat(transform("eq(a, 250)")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$a\" , 250]}")); + assertThat(transform("eq(a, 250)")).isEqualTo("{ \"$eq\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceGt() { - assertThat(transform("gt(a, 250)")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$a\" , 250]}")); + assertThat(transform("gt(a, 250)")).isEqualTo("{ \"$gt\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceGte() { - assertThat(transform("gte(a, 250)")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$a\" , 250]}")); + assertThat(transform("gte(a, 250)")).isEqualTo("{ \"$gte\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLt() { - assertThat(transform("lt(a, 250)")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$a\" , 250]}")); + assertThat(transform("lt(a, 250)")).isEqualTo("{ \"$lt\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLte() { - assertThat(transform("lte(a, 250)")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$a\" , 250]}")); + assertThat(transform("lte(a, 250)")).isEqualTo("{ \"$lte\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNe() { - assertThat(transform("ne(a, 250)")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$a\" , 250]}")); + assertThat(transform("ne(a, 250)")).isEqualTo("{ \"$ne\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAbs() { - assertThat(transform("abs(1)")).isEqualTo(Document.parse("{ \"$abs\" : 1}")); + assertThat(transform("abs(1)")).isEqualTo("{ \"$abs\" : 1}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAdd() { - assertThat(transform("add(a, 250)")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 250]}")); + assertThat(transform("add(a, 250)")).isEqualTo("{ \"$add\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCeil() { - assertThat(transform("ceil(7.8)")).isEqualTo(Document.parse("{ \"$ceil\" : 7.8}")); + assertThat(transform("ceil(7.8)")).isEqualTo("{ \"$ceil\" : 7.8}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDivide() { - assertThat(transform("divide(a, 250)")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" 
, 250]}")); + assertThat(transform("divide(a, 250)")).isEqualTo("{ \"$divide\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceExp() { - assertThat(transform("exp(2)")).isEqualTo(Document.parse("{ \"$exp\" : 2}")); + assertThat(transform("exp(2)")).isEqualTo("{ \"$exp\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceFloor() { - assertThat(transform("floor(2)")).isEqualTo(Document.parse("{ \"$floor\" : 2}")); + assertThat(transform("floor(2)")).isEqualTo("{ \"$floor\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLn() { - assertThat(transform("ln(2)")).isEqualTo(Document.parse("{ \"$ln\" : 2}")); + assertThat(transform("ln(2)")).isEqualTo("{ \"$ln\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLog() { - assertThat(transform("log(100, 10)")).isEqualTo(Document.parse("{ \"$log\" : [ 100 , 10]}")); + assertThat(transform("log(100, 10)")).isEqualTo("{ \"$log\" : [ 100 , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLog10() { - assertThat(transform("log10(100)")).isEqualTo(Document.parse("{ \"$log10\" : 100}")); + assertThat(transform("log10(100)")).isEqualTo("{ \"$log10\" : 100}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMod() { - assertThat(transform("mod(a, b)")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("mod(a, b)")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMultiply() { - assertThat(transform("multiply(a, b)")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("multiply(a, b)")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodePow() { - assertThat(transform("pow(a, 2)")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$a\" , 2]}")); + assertThat(transform("pow(a, 2)")).isEqualTo("{ \"$pow\" : [ \"$a\" , 2]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSqrt() { - assertThat(transform("sqrt(2)")).isEqualTo(Document.parse("{ \"$sqrt\" : 2}")); + assertThat(transform("sqrt(2)")).isEqualTo("{ \"$sqrt\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSubtract() { - assertThat(transform("subtract(a, b)")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("subtract(a, b)")).isEqualTo("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceTrunc() { - assertThat(transform("trunc(2.1)")).isEqualTo(Document.parse("{ \"$trunc\" : 2.1}")); + assertThat(transform("trunc(2.1)")).isEqualTo("{ \"$trunc\" : 2.1}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeConcat() { - assertThat(transform("concat(a, b, 'c')")).isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); + assertThat(transform("concat(a, b, 'c')")).isEqualTo("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSubstrc() { - assertThat(transform("substr(a, 0, 1)")).isEqualTo(Document.parse("{ \"$substr\" : [ \"$a\" , 0 , 1]}")); + assertThat(transform("substr(a, 0, 1)")).isEqualTo("{ \"$substr\" : [ \"$a\" , 0 , 1]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceToLower() { - assertThat(transform("toLower(a)")).isEqualTo(Document.parse("{ \"$toLower\" : \"$a\"}")); + assertThat(transform("toLower(a)")).isEqualTo("{ \"$toLower\" : \"$a\"}"); } @Test // DATAMONGO-1530 void 
shouldRenderMethodReferenceToUpper() { - assertThat(transform("toUpper(a)")).isEqualTo(Document.parse("{ \"$toUpper\" : \"$a\"}")); + assertThat(transform("toUpper(a)")).isEqualTo("{ \"$toUpper\" : \"$a\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStrCaseCmp() { - assertThat(transform("strcasecmp(a, b)")).isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("strcasecmp(a, b)")).isEqualTo("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMeta() { - assertThat(transform("meta('textScore')")).isEqualTo(Document.parse("{ \"$meta\" : \"textScore\"}")); + assertThat(transform("meta('textScore')")).isEqualTo("{ \"$meta\" : \"textScore\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeArrayElemAt() { - assertThat(transform("arrayElemAt(a, 10)")).isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); + assertThat(transform("arrayElemAt(a, 10)")).isEqualTo("{ \"$arrayElemAt\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeConcatArrays() { assertThat(transform("concatArrays(a, b, c)")) - .isEqualTo(Document.parse("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}")); + .isEqualTo("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeFilter() { - assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo(Document.parse( - "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}")); + assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo( + "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceIsArray() { - assertThat(transform("isArray(a)")).isEqualTo(Document.parse("{ \"$isArray\" : \"$a\"}")); + assertThat(transform("isArray(a)")).isEqualTo("{ \"$isArray\" : \"$a\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceIsSize() { - assertThat(transform("size(a)")).isEqualTo(Document.parse("{ \"$size\" : \"$a\"}")); + assertThat(transform("size(a)")).isEqualTo("{ \"$size\" : \"$a\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSlice() { - assertThat(transform("slice(a, 10)")).isEqualTo(Document.parse("{ \"$slice\" : [ \"$a\" , 10]}")); + assertThat(transform("slice(a, 10)")).isEqualTo("{ \"$slice\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMap() { - assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo(Document.parse( - "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}")); + assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo( + "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeLet() { - assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo(Document.parse( - "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}")); + assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo( + "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLiteral() { - 
assertThat(transform("literal($1)")).isEqualTo(Document.parse("{ \"$literal\" : \"$1\"}")); + assertThat(transform("literal($1)")).isEqualTo("{ \"$literal\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDayOfYear() { - assertThat(transform("dayOfYear($1)")).isEqualTo(Document.parse("{ \"$dayOfYear\" : \"$1\"}")); + assertThat(transform("dayOfYear($1)")).isEqualTo("{ \"$dayOfYear\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDayOfMonth() { - assertThat(transform("dayOfMonth($1)")).isEqualTo(Document.parse("{ \"$dayOfMonth\" : \"$1\"}")); + assertThat(transform("dayOfMonth($1)")).isEqualTo("{ \"$dayOfMonth\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDayOfWeek() { - assertThat(transform("dayOfWeek($1)")).isEqualTo(Document.parse("{ \"$dayOfWeek\" : \"$1\"}")); + assertThat(transform("dayOfWeek($1)")).isEqualTo("{ \"$dayOfWeek\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceYear() { - assertThat(transform("year($1)")).isEqualTo(Document.parse("{ \"$year\" : \"$1\"}")); + assertThat(transform("year($1)")).isEqualTo("{ \"$year\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMonth() { - assertThat(transform("month($1)")).isEqualTo(Document.parse("{ \"$month\" : \"$1\"}")); + assertThat(transform("month($1)")).isEqualTo("{ \"$month\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceWeek() { - assertThat(transform("week($1)")).isEqualTo(Document.parse("{ \"$week\" : \"$1\"}")); + assertThat(transform("week($1)")).isEqualTo("{ \"$week\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceHour() { - assertThat(transform("hour($1)")).isEqualTo(Document.parse("{ \"$hour\" : \"$1\"}")); + assertThat(transform("hour($1)")).isEqualTo("{ \"$hour\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMinute() { - assertThat(transform("minute($1)")).isEqualTo(Document.parse("{ \"$minute\" : \"$1\"}")); + assertThat(transform("minute($1)")).isEqualTo("{ \"$minute\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSecond() { - assertThat(transform("second($1)")).isEqualTo(Document.parse("{ \"$second\" : \"$1\"}")); + assertThat(transform("second($1)")).isEqualTo("{ \"$second\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMillisecond() { - assertThat(transform("millisecond($1)")).isEqualTo(Document.parse("{ \"$millisecond\" : \"$1\"}")); + assertThat(transform("millisecond($1)")).isEqualTo("{ \"$millisecond\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDateToString() { assertThat(transform("dateToString('%Y-%m-%d', $date)")) - .isEqualTo(Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); + .isEqualTo("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCond() { assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( - Document.parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); + "{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeIfNull() { - assertThat(transform("ifNull(a, 10)")).isEqualTo(Document.parse("{ \"$ifNull\" : [ \"$a\" , 10]}")); + assertThat(transform("ifNull(a, 10)")).isEqualTo("{ \"$ifNull\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 void 
shouldRenderMethodReferenceNodeSum() { - assertThat(transform("sum(a, b)")).isEqualTo(Document.parse("{ \"$sum\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("sum(a, b)")).isEqualTo("{ \"$sum\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeAvg() { - assertThat(transform("avg(a, b)")).isEqualTo(Document.parse("{ \"$avg\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("avg(a, b)")).isEqualTo("{ \"$avg\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceFirst() { - assertThat(transform("first($1)")).isEqualTo(Document.parse("{ \"$first\" : \"$1\"}")); + assertThat(transform("first($1)")).isEqualTo("{ \"$first\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLast() { - assertThat(transform("last($1)")).isEqualTo(Document.parse("{ \"$last\" : \"$1\"}")); + assertThat(transform("last($1)")).isEqualTo("{ \"$last\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMax() { - assertThat(transform("max(a, b)")).isEqualTo(Document.parse("{ \"$max\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("max(a, b)")).isEqualTo("{ \"$max\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMin() { - assertThat(transform("min(a, b)")).isEqualTo(Document.parse("{ \"$min\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("min(a, b)")).isEqualTo("{ \"$min\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodePush() { assertThat(transform("push({'item':'$item', 'quantity':'$qty'})")) - .isEqualTo(Document.parse("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}")); + .isEqualTo("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAddToSet() { - assertThat(transform("addToSet($1)")).isEqualTo(Document.parse("{ \"$addToSet\" : \"$1\"}")); + assertThat(transform("addToSet($1)")).isEqualTo("{ \"$addToSet\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStdDevPop() { assertThat(transform("stdDevPop(scores.score)")) - .isEqualTo(Document.parse("{ \"$stdDevPop\" : [ \"$scores.score\"]}")); + .isEqualTo("{ \"$stdDevPop\" : [ \"$scores.score\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStdDevSamp() { - assertThat(transform("stdDevSamp(age)")).isEqualTo(Document.parse("{ \"$stdDevSamp\" : [ \"$age\"]}")); + assertThat(transform("stdDevSamp(age)")).isEqualTo("{ \"$stdDevSamp\" : [ \"$age\"]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeEq() { - assertThat(transform("foo == 10")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo == 10")).isEqualTo("{ \"$eq\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeNe() { - assertThat(transform("foo != 10")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo != 10")).isEqualTo("{ \"$ne\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeGt() { - assertThat(transform("foo > 10")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo > 10")).isEqualTo("{ \"$gt\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeGte() { - assertThat(transform("foo >= 10")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo >= 10")).isEqualTo("{ \"$gte\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void 
shouldRenderOperationNodeLt() { - assertThat(transform("foo < 10")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo < 10")).isEqualTo("{ \"$lt\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeLte() { - assertThat(transform("foo <= 10")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo <= 10")).isEqualTo("{ \"$lte\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodePow() { - assertThat(transform("foo^2")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$foo\" , 2]}")); + assertThat(transform("foo^2")).isEqualTo("{ \"$pow\" : [ \"$foo\" , 2]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeOr() { - assertThat(transform("true || false")).isEqualTo(Document.parse("{ \"$or\" : [ true , false]}")); + assertThat(transform("true || false")).isEqualTo("{ \"$or\" : [ true , false]}"); } @Test // DATAMONGO-1530 void shouldRenderComplexOperationNodeOr() { assertThat(transform("1+2 || concat(a, b) || true")).isEqualTo( - Document.parse("{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + "{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeAnd() { - assertThat(transform("true && false")).isEqualTo(Document.parse("{ \"$and\" : [ true , false]}")); + assertThat(transform("true && false")).isEqualTo("{ \"$and\" : [ true , false]}"); } @Test // DATAMONGO-1530 void shouldRenderComplexOperationNodeAnd() { assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( - Document.parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + "{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); } @Test // DATAMONGO-1530 void shouldRenderNotCorrectly() { - assertThat(transform("!true")).isEqualTo(Document.parse("{ \"$not\" : [ true]}")); + assertThat(transform("!true")).isEqualTo("{ \"$not\" : [ true]}"); } @Test // DATAMONGO-1530 void shouldRenderComplexNotCorrectly() { - assertThat(transform("!(foo > 10)")).isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); + assertThat(transform("!(foo > 10)")).isEqualTo("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceIndexOfBytes() { assertThat(transform("indexOfBytes(item, 'foo')")) - .isEqualTo(Document.parse("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}")); + .isEqualTo("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceIndexOfCP() { assertThat(transform("indexOfCP(item, 'foo')")) - .isEqualTo(Document.parse("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}")); + .isEqualTo("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceSplit() { - assertThat(transform("split(item, ',')")).isEqualTo(Document.parse("{ \"$split\" : [ \"$item\" , \",\"]}")); + assertThat(transform("split(item, ',')")).isEqualTo("{ \"$split\" : [ \"$item\" , \",\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceStrLenBytes() { - assertThat(transform("strLenBytes(item)")).isEqualTo(Document.parse("{ \"$strLenBytes\" : \"$item\"}")); + assertThat(transform("strLenBytes(item)")).isEqualTo("{ \"$strLenBytes\" : \"$item\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceStrLenCP() { - assertThat(transform("strLenCP(item)")).isEqualTo(Document.parse("{ 
\"$strLenCP\" : \"$item\"}")); + assertThat(transform("strLenCP(item)")).isEqualTo("{ \"$strLenCP\" : \"$item\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodSubstrCP() { - assertThat(transform("substrCP(item, 0, 5)")).isEqualTo(Document.parse("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}")); + assertThat(transform("substrCP(item, 0, 5)")).isEqualTo("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceReverseArray() { - assertThat(transform("reverseArray(array)")).isEqualTo(Document.parse("{ \"$reverseArray\" : \"$array\"}")); + assertThat(transform("reverseArray(array)")).isEqualTo("{ \"$reverseArray\" : \"$array\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceReduce() { - assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo(Document.parse( - "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}")); + assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo( + "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceZip() { assertThat(transform("zip(new String[]{'$array1', '$array2'})")) - .isEqualTo(Document.parse("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}")); + .isEqualTo("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceZipWithOptionalArgs() { - assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo(Document.parse( - "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}")); + assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo( + "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}"); } @Test // DATAMONGO-1548 void shouldRenderMethodIn() { - assertThat(transform("in('item', array)")).isEqualTo(Document.parse("{ \"$in\" : [ \"item\" , \"$array\"]}")); + assertThat(transform("in('item', array)")).isEqualTo("{ \"$in\" : [ \"item\" , \"$array\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneIsoDayOfWeek() { - assertThat(transform("isoDayOfWeek(date)")).isEqualTo(Document.parse("{ \"$isoDayOfWeek\" : \"$date\"}")); + assertThat(transform("isoDayOfWeek(date)")).isEqualTo("{ \"$isoDayOfWeek\" : \"$date\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneIsoWeek() { - assertThat(transform("isoWeek(date)")).isEqualTo(Document.parse("{ \"$isoWeek\" : \"$date\"}")); + assertThat(transform("isoWeek(date)")).isEqualTo("{ \"$isoWeek\" : \"$date\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneIsoWeekYear() { - assertThat(transform("isoWeekYear(date)")).isEqualTo(Document.parse("{ \"$isoWeekYear\" : \"$date\"}")); + assertThat(transform("isoWeekYear(date)")).isEqualTo("{ \"$isoWeekYear\" : \"$date\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneType() { - assertThat(transform("type(a)")).isEqualTo(Document.parse("{ \"$type\" : \"$a\"}")); + assertThat(transform("type(a)")).isEqualTo("{ \"$type\" : \"$a\"}"); } @Test // DATAMONGO-2077 void shouldRenderArrayToObjectWithFieldReference() { - assertThat(transform("arrayToObject(field)")).isEqualTo(Document.parse("{ \"$arrayToObject\" : \"$field\"}")); + 
assertThat(transform("arrayToObject(field)")).isEqualTo("{ \"$arrayToObject\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderArrayToObjectWithArray() { assertThat(transform("arrayToObject(new String[]{'key', 'value'})")) - .isEqualTo(Document.parse("{ \"$arrayToObject\" : [\"key\", \"value\"]}")); + .isEqualTo("{ \"$arrayToObject\" : [\"key\", \"value\"]}"); } @Test // DATAMONGO-2077 void shouldRenderObjectToArrayWithFieldReference() { - assertThat(transform("objectToArray(field)")).isEqualTo(Document.parse("{ \"$objectToArray\" : \"$field\"}")); + assertThat(transform("objectToArray(field)")).isEqualTo("{ \"$objectToArray\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderMergeObjects() { assertThat(transform("mergeObjects(field1, $$ROOT)")) - .isEqualTo(Document.parse("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}")); + .isEqualTo("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}"); } @Test // DATAMONGO-2077 void shouldRenderTrimWithoutChars() { - assertThat(transform("trim(field)")).isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\"}}")); + assertThat(transform("trim(field)")).isEqualTo("{ \"$trim\" : {\"input\" : \"$field\"}}"); } @Test // DATAMONGO-2077 void shouldRenderTrimWithChars() { assertThat(transform("trim(field, 'ie')")) - .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); } @Test // DATAMONGO-2077 void shouldRenderTrimWithCharsFromFieldReference() { assertThat(transform("trim(field1, field2)")) - .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } @Test // DATAMONGO-2077 void shouldRenderLtrimWithoutChars() { - assertThat(transform("ltrim(field)")).isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\"}}")); + assertThat(transform("ltrim(field)")).isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\"}}"); } @Test // DATAMONGO-2077 void shouldRenderLtrimWithChars() { assertThat(transform("ltrim(field, 'ie')")) - .isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); } @Test // DATAMONGO-2077 void shouldRenderLtrimWithCharsFromFieldReference() { assertThat(transform("ltrim(field1, field2)")) - .isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } @Test // DATAMONGO-2077 void shouldRenderRtrimWithoutChars() { - assertThat(transform("rtrim(field)")).isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\"}}")); + assertThat(transform("rtrim(field)")).isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\"}}"); } @Test // DATAMONGO-2077 void shouldRenderRtrimWithChars() { assertThat(transform("rtrim(field, 'ie')")) - .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); } @Test // DATAMONGO-2077 void shouldRenderRtrimWithCharsFromFieldReference() { assertThat(transform("rtrim(field1, field2)")) - .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } @Test // DATAMONGO-2077 void 
shouldRenderConvertWithoutOptionalParameters() { assertThat(transform("convert(field, 'string')")) - .isEqualTo(Document.parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}")); + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}"); } @Test // DATAMONGO-2077 void shouldRenderConvertWithOnError() { - assertThat(transform("convert(field, 'int', 'Not an integer.')")).isEqualTo(Document - .parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}")); + assertThat(transform("convert(field, 'int', 'Not an integer.')")) + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}"); } @Test // DATAMONGO-2077 void shouldRenderConvertWithOnErrorOnNull() { - assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo(Document.parse( - "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\", \"onNull\" : -1 }}")); + assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo( + "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\", \"onNull\" : -1 }}"); } @Test // DATAMONGO-2077 void shouldRenderToBool() { - assertThat(transform("toBool(field)")).isEqualTo(Document.parse("{ \"$toBool\" : \"$field\"}")); + assertThat(transform("toBool(field)")).isEqualTo("{ \"$toBool\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToDate() { - assertThat(transform("toDate(field)")).isEqualTo(Document.parse("{ \"$toDate\" : \"$field\"}")); + assertThat(transform("toDate(field)")).isEqualTo("{ \"$toDate\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToDecimal() { - assertThat(transform("toDecimal(field)")).isEqualTo(Document.parse("{ \"$toDecimal\" : \"$field\"}")); + assertThat(transform("toDecimal(field)")).isEqualTo("{ \"$toDecimal\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToDouble() { - assertThat(transform("toDouble(field)")).isEqualTo(Document.parse("{ \"$toDouble\" : \"$field\"}")); + assertThat(transform("toDouble(field)")).isEqualTo("{ \"$toDouble\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToInt() { - assertThat(transform("toInt(field)")).isEqualTo(Document.parse("{ \"$toInt\" : \"$field\"}")); + assertThat(transform("toInt(field)")).isEqualTo("{ \"$toInt\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToLong() { - assertThat(transform("toLong(field)")).isEqualTo(Document.parse("{ \"$toLong\" : \"$field\"}")); + assertThat(transform("toLong(field)")).isEqualTo("{ \"$toLong\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToObjectId() { - assertThat(transform("toObjectId(field)")).isEqualTo(Document.parse("{ \"$toObjectId\" : \"$field\"}")); + assertThat(transform("toObjectId(field)")).isEqualTo("{ \"$toObjectId\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToString() { - assertThat(transform("toString(field)")).isEqualTo(Document.parse("{ \"$toString\" : \"$field\"}")); + assertThat(transform("toString(field)")).isEqualTo("{ \"$toString\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithoutOptionalParameters() { assertThat(transform("dateFromString(field)")) - .isEqualTo(Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}")); + .isEqualTo("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormat() { 
assertThat(transform("dateFromString(field, 'DD-MM-YYYY')")).isEqualTo( - Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}")); + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormatAndTimezone() { - assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo(Document.parse( - "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}")); + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { - assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo(Document.parse( - "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}")); + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { - assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo(Document.parse( - "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, \"onNull\" : -2}}")); + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, \"onNull\" : -2}}"); } @Test // DATAMONGO-2077, DATAMONGO-2671 void shouldRenderDateFromParts() { - assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( - "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}")); + assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); } @Test // DATAMONGO-2077, DATAMONGO-2671 void shouldRenderIsoDateFromParts() { - assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( - "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}")); + assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); } @Test // DATAMONGO-2077 void shouldRenderDateToParts() { assertThat(transform("dateToParts(field, 'UTC', false)")).isEqualTo( - Document.parse("{ \"$dateToParts\" : {\"date\" : \"$field\", \"timezone\" : \"UTC\", \"iso8601\" : false}}")); + "{ \"$dateToParts\" : {\"date\" : \"$field\", 
\"timezone\" : \"UTC\", \"iso8601\" : false}}"); } @Test // DATAMONGO-2077 void shouldRenderIndexOfArray() { assertThat(transform("indexOfArray(field, 2)")) - .isEqualTo(Document.parse("{ \"$indexOfArray\" : [\"$field\", 2 ]}")); + .isEqualTo("{ \"$indexOfArray\" : [\"$field\", 2 ]}"); } @Test // DATAMONGO-2077 void shouldRenderRange() { - assertThat(transform("range(0, 10, 2)")).isEqualTo(Document.parse("{ \"$range\" : [0, 10, 2 ]}")); + assertThat(transform("range(0, 10, 2)")).isEqualTo("{ \"$range\" : [0, 10, 2 ]}"); } @Test // DATAMONGO-2370 void shouldRenderRound() { - assertThat(transform("round(field)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\"]}")); + assertThat(transform("round(field)")).isEqualTo("{ \"$round\" : [\"$field\"]}"); } @Test // DATAMONGO-2370 void shouldRenderRoundWithPlace() { - assertThat(transform("round(field, 2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); + assertThat(transform("round(field, 2)")).isEqualTo("{ \"$round\" : [\"$field\", 2]}"); } @Test // GH-3714 void shouldRenderDegreesToRadians() { - assertThat(transform("degreesToRadians(angle_a)")).isEqualTo(Document.parse("{ \"$degreesToRadians\" : \"$angle_a\"}")); + assertThat(transform("degreesToRadians(angle_a)")).isEqualTo("{ \"$degreesToRadians\" : \"$angle_a\"}"); } @Test // GH-3712 void shouldRenderCovariancePop() { assertThat(transform("covariancePop(field1, field2)")) - .isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); + .isEqualTo("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}"); } @Test // GH-3712 void shouldRenderCovarianceSamp() { assertThat(transform("covarianceSamp(field1, field2)")) - .isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); + .isEqualTo("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}"); } @Test // GH-3715 void shouldRenderRank() { - assertThat(transform("rank()")).isEqualTo(Document.parse("{ $rank : {} }")); + assertThat(transform("rank()")).isEqualTo("{ $rank : {} }"); } @Test // GH-3715 void shouldRenderDenseRank() { - assertThat(transform("denseRank()")).isEqualTo(Document.parse("{ $denseRank : {} }")); + assertThat(transform("denseRank()")).isEqualTo("{ $denseRank : {} }"); } @Test // GH-3717 void shouldRenderDocumentNumber() { - assertThat(transform("documentNumber()")).isEqualTo(Document.parse("{ $documentNumber : {} }")); + assertThat(transform("documentNumber()")).isEqualTo("{ $documentNumber : {} }"); } @Test // GH-3727 void rendersShift() { assertThat(transform("shift(quantity, 1)")) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); } @Test // GH-3727 void rendersShiftWithDefault() { assertThat(transform("shift(quantity, 1, 'Not available')")) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); } @Test // GH-3716 void shouldRenderDerivative() { assertThat(transform("derivative(miles, 'hour')")) - .isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); + .isEqualTo("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }"); } @Test // GH-3721 void shouldRenderIntegral() { - assertThat(transform("integral(field)")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\" }}")); + assertThat(transform("integral(field)")).isEqualTo("{ \"$integral\" : { \"input\" : \"$field\" }}"); } @Test // 
GH-3721 void shouldRenderIntegralWithUnit() { assertThat(transform("integral(field, 'hour')")) - .isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); + .isEqualTo("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}"); } @Test // GH-3728 void shouldRenderSin() { - assertThat(transform("sin(angle)")).isEqualTo(Document.parse("{ \"$sin\" : \"$angle\"}")); + assertThat(transform("sin(angle)")).isEqualTo("{ \"$sin\" : \"$angle\"}"); } @Test // GH-3728 void shouldRenderSinh() { - assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); + assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); } @Test // GH-3710 void shouldRenderCos() { - assertThat(transform("cos(angle)")).isEqualTo(Document.parse("{ \"$cos\" : \"$angle\"}")); + assertThat(transform("cos(angle)")).isEqualTo("{ \"$cos\" : \"$angle\"}"); } @Test // GH-3710 void shouldRenderCosh() { - assertThat(transform("cosh(angle)")).isEqualTo(Document.parse("{ \"$cosh\" : \"$angle\"}")); + assertThat(transform("cosh(angle)")).isEqualTo("{ \"$cosh\" : \"$angle\"}"); } @Test // GH-3730 void shouldRenderTan() { - assertThat(transform("tan(angle)")).isEqualTo(Document.parse("{ \"$tan\" : \"$angle\"}")); + assertThat(transform("tan(angle)")).isEqualTo("{ \"$tan\" : \"$angle\"}"); } @Test // GH-3730 void shouldRenderTanh() { - assertThat(transform("tanh(angle)")).isEqualTo(Document.parse("{ \"$tanh\" : \"$angle\"}")); + assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } - private Object transform(String expression, Object... params) { + private Document transform(String expression, Object... params) { + return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); + } + + private Object transformValue(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? 
result.toString() : result); } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 9b00811a7b..6373eb663c 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` @@ -112,7 +112,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `type` | Convert Aggregation Operators -| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` +| `convert`, `degreesToRadians`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` | Object Aggregation Operators | `objectToArray`, `mergeObjects` From aca403c11240a5e2253d36e4ecba627283dd9ee7 Mon Sep 17 00:00:00 2001 From: Ryan Gibb Date: Fri, 30 Jul 2021 14:06:42 +0100 Subject: [PATCH 057/885] Fix a typo in `MongoConverter` javadoc. Original pull request: #3758. --- .../data/mongodb/core/convert/MongoConverter.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java index 8887a3bd03..20499d3173 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java @@ -40,13 +40,14 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Ryan Gibb */ public interface MongoConverter extends EntityConverter, MongoPersistentProperty, Object, Bson>, MongoWriter, EntityReader { /** - * Returns thw {@link TypeMapper} being used to write type information into {@link Document}s created with that + * Returns the {@link TypeMapper} being used to write type information into {@link Document}s created with that * converter. * * @return will never be {@literal null}. From 869b88702db52a25417da5427859f75621d78e7d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 10:15:13 +0200 Subject: [PATCH 058/885] Polishing. Fix typo in reference docs. 
See #3758 --- src/main/asciidoc/reference/aggregation-framework.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 6373eb663c..f23b290697 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -88,7 +88,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `abs`, `add` (+++*+++ via `plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators -| `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` +| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators | `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne` From afef243634e4efa215755cf14b678bebe10ff92a Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 08:46:22 +0200 Subject: [PATCH 059/885] Add support for `$dateAdd` aggregation operator. Closes: #3713 Original pull request: #3748. --- .../core/aggregation/DateOperators.java | 144 +++++++++++++++++- .../core/spel/MethodReferenceNode.java | 1 + .../aggregation/DateOperatorsUnitTests.java | 44 ++++++ .../SpelExpressionTransformerUnitTests.java | 6 + 4 files changed, 189 insertions(+), 6 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index 15f10f7d6c..f7abf88d72 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; @@ -156,7 +157,7 @@ public static Timezone none() { * representing an Olson Timezone Identifier or UTC Offset. * * @param value the plain timezone {@link String}, a {@link Field} holding the timezone or an - * {@link AggregationExpression} resulting in the timezone. + * {@link AggregationExpression} resulting in the timezone. * @return new instance of {@link Timezone}. */ public static Timezone valueOf(Object value) { @@ -274,6 +275,41 @@ public DateOperatorFactory withTimezone(Timezone timezone) { return new DateOperatorFactory(fieldReference, expression, dateValue, timezone); } + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units). @param expression must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
+ * @since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units). @param fieldReference must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateAdd addValueOf(String fieldReference, String unit) { + return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units). @param value must not + * be {@literal null}. @param unit the unit of measure. Must not be {@literal null}. + * + * @return + * @since 3.3 new instance of {@link DateAdd}. + */ + public DateAdd add(Object value, String unit) { + return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone); + } + /** * Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and * 366. @@ -1480,7 +1516,6 @@ protected java.util.Map append(String key, Object value) { } else { clone.put("timezone", ((Timezone) value).value); } - } else { clone.put(key, value); } @@ -1911,7 +1946,7 @@ default T millisecondOf(AggregationExpression expression) { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ * @since 2.1 */ public static class DateFromParts extends TimezonedDateAggregationExpression implements DateParts { @@ -2086,7 +2121,7 @@ default DateFromParts yearOf(AggregationExpression expression) { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ * @since 2.1 */ public static class IsoDateFromParts extends TimezonedDateAggregationExpression @@ -2262,7 +2297,7 @@ default IsoDateFromParts isoWeekYearOf(AggregationExpression expression) { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/ * @since 2.1 */ public static class DateToParts extends TimezonedDateAggregationExpression { @@ -2343,7 +2378,7 @@ protected String getMongoMethod() { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/ * @since 2.1 */ public static class DateFromString extends TimezonedDateAggregationExpression { @@ -2418,6 +2453,103 @@ protected String getMongoMethod() { } } + /** + * {@link AggregationExpression} for {@code $dateAdd}.
+ * NOTE: Requires MongoDB 5.0 or later. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DateAdd extends TimezonedDateAggregationExpression { + + private DateAdd(Object value) { + super(value); + } + + /** + * Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a + * {@link #toDate(Object) start date}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateAdd addValueOf(AggregationExpression expression, String unit) { + return addValue(expression, unit); + } + + /** + * Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}. + * + * @param fieldReference must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateAdd addValueOf(String fieldReference, String unit) { + return addValue(Fields.field(fieldReference), unit); + } + + /** + * Add the number of {@literal units} to a {@link #toDate(Object) start date}. + * + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateAdd addValue(Object value, String unit) { + + Map args = new HashMap<>(); + args.put("unit", unit); + args.put("amount", value); + return new DateAdd(args); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateAdd toDateOf(AggregationExpression expression) { + return toDate(expression); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateAdd toDateOf(String fieldReference) { + return toDate(Fields.field(fieldReference)); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateAdd toDate(Object dateExpression) { + return new DateAdd(append("startDate", dateExpression)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateAdd}. 
+ */ + public DateAdd withTimezone(Timezone timezone) { + return new DateAdd(appendTimezone(argumentMap(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dateAdd"; + } + } + @SuppressWarnings("unchecked") private static T applyTimezone(T instance, Timezone timezone) { return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 1efe94c757..6b4daa15b8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -144,6 +144,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("literal", singleArgRef().forOperator("$literal")); // DATE OPERATORS + map.put("dateAdd", mapArgRef().forOperator("$dateAdd").mappingParametersTo("startDate", "unit", "amount", "timezone")); map.put("dayOfYear", singleArgRef().forOperator("$dayOfYear")); map.put("dayOfMonth", singleArgRef().forOperator("$dayOfMonth")); map.put("dayOfWeek", singleArgRef().forOperator("$dayOfWeek")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java new file mode 100644 index 0000000000..036edfdce1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2021. the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; + +/** + * @author Christoph Strobl + */ +class DateOperatorsUnitTests { + + @Test // GH-3713 + void rendersDateAdd() { + + assertThat(DateOperators.dateOf("purchaseDate").add(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + } + + @Test // GH-3713 + void rendersDateAddWithTimezone() { + + assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")).add(3, "day") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( + "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 193ffb520d..337d61f984 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1039,6 +1039,12 @@ private Document transform(String expression, Object... params) { } private Object transformValue(String expression, Object... params) { + @Test // GH-3713 + void shouldRenderDateAdd() { + assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + } + + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); } From fc41793d5de7bf48f551638e9836ef0eaceb1c43 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 09:08:18 +0200 Subject: [PATCH 060/885] Add support for `$dateDiff` aggregation operator. Closes: #3713 Original pull request: #3748. --- .../core/aggregation/DateOperators.java | 144 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../aggregation/DateOperatorsUnitTests.java | 16 ++ .../SpelExpressionTransformerUnitTests.java | 5 + 4 files changed, 166 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index f7abf88d72..a9b1d411cd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -340,6 +340,42 @@ public DayOfWeek dayOfWeek() { return applyTimezone(DayOfWeek.dayOfWeek(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date + * computed by the given {@link AggregationExpression expression}. @param expression must not be {@literal null}. + * + * @param unit the unit of measure. 
Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date stored + * at the given {@literal field}. @param expression must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, String unit) { + return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date given + * {@literal value}. @param value anything the resolves to a valid date. Must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateDiff diff(Object value, String unit) { + return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone); + } + /** * Creates new {@link AggregationExpression} that returns the year portion of a date. * @@ -2550,6 +2586,114 @@ protected String getMongoMethod() { } } + /** + * {@link AggregationExpression} for {@code $dateDiff}.
+ * NOTE: Requires MongoDB 5.0 or later. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DateDiff extends TimezonedDateAggregationExpression { + + private DateDiff(Object value) { + super(value); + } + + /** + * Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a + * {@link #toDate(Object) start date}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateDiff diffValueOf(AggregationExpression expression, String unit) { + return diffValue(expression, unit); + } + + /** + * Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}. + * + * @param fieldReference must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateDiff diffValueOf(String fieldReference, String unit) { + return diffValue(Fields.field(fieldReference), unit); + } + + /** + * Add the number of {@literal units} to a {@link #toDate(Object) start date}. + * + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateDiff diffValue(Object value, String unit) { + + Map args = new HashMap<>(); + args.put("unit", unit); + args.put("endDate", value); + return new DateDiff(args); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateDiff toDateOf(AggregationExpression expression) { + return toDate(expression); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateDiff toDateOf(String fieldReference) { + return toDate(Fields.field(fieldReference)); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateDiff toDate(Object dateExpression) { + return new DateDiff(append("startDate", dateExpression)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateAdd}. + */ + public DateDiff withTimezone(Timezone timezone) { + return new DateDiff(appendTimezone(argumentMap(), timezone)); + } + + /** + * Set the start day of the week if the unit if measure is set to {@literal week}. Uses {@literal Sunday} by + * default. + * + * @param day must not be {@literal null}. + * @return new instance of {@link DateDiff}. 
+ */ + public DateDiff startOfWeek(Object day) { + return new DateDiff(append("startOfWeek", day)); + } + + @Override + protected String getMongoMethod() { + return "$dateDiff"; + } + } + @SuppressWarnings("unchecked") private static T applyTimezone(T instance, Timezone timezone) { return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 6b4daa15b8..6a60a7df1a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -145,6 +145,7 @@ public class MethodReferenceNode extends ExpressionNode { // DATE OPERATORS map.put("dateAdd", mapArgRef().forOperator("$dateAdd").mappingParametersTo("startDate", "unit", "amount", "timezone")); + map.put("dateDiff", mapArgRef().forOperator("$dateDiff").mappingParametersTo("startDate", "endDate", "unit","timezone", "startOfWeek")); map.put("dayOfYear", singleArgRef().forOperator("$dayOfYear")); map.put("dayOfMonth", singleArgRef().forOperator("$dayOfMonth")); map.put("dayOfWeek", singleArgRef().forOperator("$dayOfWeek")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java index 036edfdce1..ab975b852a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -40,5 +40,21 @@ void rendersDateAddWithTimezone() { .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); } + + @Test // GH-3713 + void rendersDateDiff() { + + assertThat( + DateOperators.dateOf("purchaseDate").diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document + .parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + } + + @Test // GH-3713 + void rendersDateDiffWithTimezone() { + + assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")) + .diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( + "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 337d61f984..33edab1d5a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1044,6 +1044,11 @@ void shouldRenderDateAdd() { assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { 
startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); } + @Test // GH-3713 + void shouldRenderDateDiff() { + assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 456c1ad26abb77d82595f7e1589ca02a9a780da9 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 09:18:16 +0200 Subject: [PATCH 061/885] Add shortcut for date aggregation operators working with timezone. See: #3713 Original pull request: #3748. --- .../core/aggregation/DateOperators.java | 26 +++++++++++++++++++ .../aggregation/DateOperatorsUnitTests.java | 6 ++--- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index a9b1d411cd..a97d64c52d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -46,6 +46,19 @@ public static DateOperatorFactory dateOf(String fieldReference) { return new DateOperatorFactory(fieldReference); } + /** + * Take the date referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new DateOperatorFactory(fieldReference).withTimezone(timezone); + } + /** * Take the date resulting from the given {@link AggregationExpression}. * @@ -58,6 +71,19 @@ public static DateOperatorFactory dateOf(AggregationExpression expression) { return new DateOperatorFactory(expression); } + /** + * Take the date resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) { + + Assert.notNull(expression, "Expression must not be null!"); + return new DateOperatorFactory(expression).withTimezone(timezone); + } + /** * Take the given value as date. *

diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java index ab975b852a..95f977ed73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -35,8 +35,8 @@ void rendersDateAdd() { @Test // GH-3713 void rendersDateAddWithTimezone() { - - assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")).add(3, "day") + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).add(3, "day") .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); } @@ -53,7 +53,7 @@ void rendersDateDiff() { @Test // GH-3713 void rendersDateDiffWithTimezone() { - assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")) + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")) .diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }")); } From 24171b3ae27a4bc867fd619420543ce1b56344f5 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 10:59:26 +0200 Subject: [PATCH 062/885] Polishing. Introduce factory methods to convert TimeZone/ZoneId/ZoneOffset into Mongo Timezone. Introduce TemporalUnit abstraction and converters to convert ChronoUnit and TimeUnit into TemporalUnit for date operators accepting a unit parameter. See #3713 Original pull request: #3748. --- .../core/aggregation/DateOperators.java | 294 ++++++++++++++++-- .../aggregation/SetWindowFieldsOperation.java | 63 ++++ .../aggregation/DateOperatorsUnitTests.java | 48 ++- .../SpelExpressionTransformerUnitTests.java | 11 +- .../reference/aggregation-framework.adoc | 2 +- 5 files changed, 376 insertions(+), 42 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index a97d64c52d..029b994f2e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -15,10 +15,16 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Locale; import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -157,6 +163,7 @@ public static DateFromString dateFromString(String value) { * NOTE: Support for timezones in aggregations Requires MongoDB 3.6 or later. 
* * @author Christoph Strobl + * @author Mark Paluch * @since 2.1 */ public static class Timezone { @@ -192,6 +199,61 @@ public static Timezone valueOf(Object value) { return new Timezone(value); } + /** + * Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset. + * + * @param timeZone {@link TimeZone} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromOffset(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null!"); + + return fromOffset( + ZoneOffset.ofTotalSeconds(Math.toIntExact(TimeUnit.MILLISECONDS.toSeconds(timeZone.getRawOffset())))); + } + + /** + * Create a {@link Timezone} for the given {@link ZoneOffset} rendering the offset as UTC offset. + * + * @param offset {@link ZoneOffset} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromOffset(ZoneOffset offset) { + + Assert.notNull(offset, "ZoneOffset must not be null!"); + return new Timezone(offset.toString()); + } + + /** + * Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset. + * + * @param timeZone {@link Timezone} rendering the offset as zone identifier. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null!"); + + return valueOf(timeZone.getID()); + } + + /** + * Create a {@link Timezone} for the given {@link java.time.ZoneId} rendering the offset as UTC offset. + * + * @param zoneId {@link ZoneId} rendering the offset as zone identifier. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(ZoneId zoneId) { + + Assert.notNull(zoneId, "ZoneId must not be null!"); + return new Timezone(zoneId.toString()); + } + /** * Create a {@link Timezone} for the {@link Field} reference holding the Olson Timezone Identifier or UTC Offset. * @@ -212,6 +274,11 @@ public static Timezone ofField(String fieldReference) { public static Timezone ofExpression(AggregationExpression expression) { return valueOf(expression); } + + @Nullable + Object getValue() { + return value; + } } /** @@ -303,32 +370,64 @@ public DateOperatorFactory withTimezone(Timezone timezone) { /** * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression - * expression} (in {@literal units). @param expression must not be {@literal null}. - * + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateAdd addValueOf(AggregationExpression expression, String unit) { return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
@since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone(DateAdd.addValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + /** * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in - * {@literal units). @param fieldReference must not be {@literal null}. - * + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateAdd addValueOf(String fieldReference, String unit) { return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone); } /** - * Creates new {@link AggregationExpression} that adds the given value (in {@literal units). @param value must not - * be {@literal null}. @param unit the unit of measure. Must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateAdd addValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateAdd.addValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. * @return * @since 3.3 new instance of {@link DateAdd}. */ @@ -336,6 +435,22 @@ public DateAdd add(Object value, String unit) { return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return + * @since 3.3 new instance of {@link DateAdd}. + */ + public DateAdd add(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateAdd.addValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + /** * Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and * 366. @@ -367,41 +482,89 @@ public DayOfWeek dayOfWeek() { } /** - * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date - * computed by the given {@link AggregationExpression expression}. @param expression must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. 
@since 3.3 */ public DateDiff diffValueOf(AggregationExpression expression, String unit) { return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone); } /** - * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date stored - * at the given {@literal field}. @param expression must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateDiff diffValueOf(String fieldReference, String unit) { return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone); } /** - * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date given - * {@literal value}. @param value anything the resolves to a valid date. Must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given + * {@literal value}. + * + * @param value anything the resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateDiff diff(Object value, String unit) { return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given + * {@literal value}. + * + * @param value anything the resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
@since 3.3 + */ + public DateDiff diff(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateDiff.diffValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + /** * Creates new {@link AggregationExpression} that returns the year portion of a date. * @@ -2720,6 +2883,85 @@ protected String getMongoMethod() { } } + /** + * Interface defining a temporal unit for date operators. + * + * @author Mark Paluch + * @since 3.3 + */ + public interface TemporalUnit { + + String name(); + + /** + * Converts the given time unit into a {@link TemporalUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static TemporalUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + switch (timeUnit) { + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLISECONDS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", timeUnit)); + } + + /** + * Converts the given chrono unit into a {@link TemporalUnit}. Supported units are: years, weeks, months, days, + * hours, minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static TemporalUnit from(ChronoUnit chronoUnit) { + + switch (chronoUnit) { + case YEARS: + return TemporalUnits.YEAR; + case WEEKS: + return TemporalUnits.WEEK; + case MONTHS: + return TemporalUnits.MONTH; + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLIS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", chronoUnit)); + } + } + + /** + * Supported temporal units. 
+ */ + enum TemporalUnits implements TemporalUnit { + YEAR, QUARTER, WEEK, MONTH, DAY, HOUR, MINUTE, SECOND, MILLISECOND + + } + @SuppressWarnings("unchecked") private static T applyTimezone(T instance, Timezone timezone) { return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java index 9c40a0b642..fa01b02b98 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -15,9 +15,11 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.TimeUnit; import org.bson.Document; import org.springframework.data.domain.Sort; @@ -626,7 +628,68 @@ public Document toDocument(AggregationOperationContext ctx) { * The actual time unit to apply to a {@link Window}. */ public interface WindowUnit { + String name(); + + /** + * Converts the given time unit into a {@link WindowUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static WindowUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + switch (timeUnit) { + case DAYS: + return WindowUnits.DAY; + case HOURS: + return WindowUnits.HOUR; + case MINUTES: + return WindowUnits.MINUTE; + case SECONDS: + return WindowUnits.SECOND; + case MILLISECONDS: + return WindowUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", timeUnit)); + } + + /** + * Converts the given chrono unit into a {@link WindowUnit}. Supported units are: years, weeks, months, days, hours, + * minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. 
+ */ + static WindowUnit from(ChronoUnit chronoUnit) { + + switch (chronoUnit) { + case YEARS: + return WindowUnits.YEAR; + case WEEKS: + return WindowUnits.WEEK; + case MONTHS: + return WindowUnits.MONTH; + case DAYS: + return WindowUnits.DAY; + case HOURS: + return WindowUnits.HOUR; + case MINUTES: + return WindowUnits.MINUTE; + case SECONDS: + return WindowUnits.SECOND; + case MILLIS: + return WindowUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", chronoUnit)); + } } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java index 95f977ed73..6d63b954f8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -15,14 +15,22 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.TimeZone; -import org.bson.Document; import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; /** + * Unit tests for {@link DateOperators}. + * * @author Christoph Strobl + * @author Mark Paluch */ class DateOperatorsUnitTests { @@ -30,15 +38,15 @@ class DateOperatorsUnitTests { void rendersDateAdd() { assertThat(DateOperators.dateOf("purchaseDate").add(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); } @Test // GH-3713 void rendersDateAddWithTimezone() { assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).add(3, "day") - .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( - "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }"); } @Test // GH-3713 @@ -46,15 +54,37 @@ void rendersDateDiff() { assertThat( DateOperators.dateOf("purchaseDate").diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document - .parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); } @Test // GH-3713 void rendersDateDiffWithTimezone() { assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")) - .diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( - "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }")); + .diffValueOf("delivered", DateOperators.TemporalUnit.from(ChronoUnit.DAYS)) + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", 
timezone : \"America/Chicago\" } }"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(ZoneOffset.ofHoursMinutes(3, 30)).getValue()).isEqualTo("+03:30"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("-06:00"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneId() { + assertThat(DateOperators.Timezone.fromZone(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("America/Chicago"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneId() { + assertThat(DateOperators.Timezone.fromZone(ZoneId.of("America/Chicago")).getValue()).isEqualTo("America/Chicago"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 33edab1d5a..bc8da0a3c1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1034,11 +1034,6 @@ void shouldRenderTanh() { assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } - private Document transform(String expression, Object... params) { - return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); - } - - private Object transformValue(String expression, Object... params) { @Test // GH-3713 void shouldRenderDateAdd() { assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); @@ -1049,7 +1044,11 @@ void shouldRenderDateDiff() { assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); } - private Object transform(String expression, Object... params) { + private Document transform(String expression, Object... params) { + return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); + } + + private Object transformValue(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? 
result.toString() : result); } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index f23b290697..bc7a032e75 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -100,7 +100,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `literal` | Date Aggregation Operators -| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` +| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateAdd`, `dateDiff`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` | Variable Operators | `map` From 92cc2a582a8b5770996f11b409f9629678c2ce8e Mon Sep 17 00:00:00 2001 From: Mushtaq Ahmed Date: Sat, 31 Jul 2021 16:52:38 +0530 Subject: [PATCH 063/885] Add support for `$rand` aggregation operator. Closes #3724 Original pull request: #3759 --- .../core/aggregation/ArithmeticOperators.java | 25 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../ArithmeticOperatorsUnitTests.java | 5 ++++ .../SpelExpressionTransformerUnitTests.java | 5 ++++ 4 files changed, 36 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 7896486abf..9610967830 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Locale; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; @@ -63,6 +64,16 @@ public static ArithmeticOperatorFactory valueOf(AggregationExpression expression return new ArithmeticOperatorFactory(expression); } + /** + * Creates new {@link AggregationExpression} that returns a random float between 0 and 1 each time it is called. + * + * @return new instance of {@link Rand}. + * @since 3.3 + */ + public static Rand rand() { + return new Rand(); + } + /** * @author Christoph Strobl */ @@ -2671,4 +2682,18 @@ protected String getMongoMethod() { return "$tanh"; } } + + /** + * {@link Rand} returns a floating value between 0 and 1. 
+ * + * @author Mushtaq Ahmed + * @since 3.3 + */ + public static class Rand implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rand", new Document()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 6a60a7df1a..a91358353c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -99,6 +99,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("cosh", singleArgRef().forOperator("$cosh")); map.put("tan", singleArgRef().forOperator("$tan")); map.put("tanh", singleArgRef().forOperator("$tanh")); + map.put("rand", emptyRef().forOperator("$rand")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 7cde7cd1c4..b589e152aa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -166,4 +166,9 @@ void rendersTanhWithValueInDegrees() { .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } + + @Test // GH-3724 + void rendersRank() { + assertThat(rand().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rand", new Document())); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index bc8da0a3c1..cc20ffd121 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1044,6 +1044,11 @@ void shouldRenderDateDiff() { assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); } + @Test // GH-3724 + void shouldRenderRand() { + assertThat(transform("rand()")).isEqualTo(Document.parse("{ $rand : {} }")); + } + private Document transform(String expression, Object... params) { return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); } From 7c6e951c7c0088459ada9e6c5a7c3ad2a427fbaa Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 11:13:02 +0200 Subject: [PATCH 064/885] Polishing. Add author tags, tweak Javadoc style. Simplify tests. Document operator. See #3724 Original pull request: #3759. 
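For reference, the operator can be used through the fluent aggregation API along the following lines (a minimal sketch assuming the usual imports from org.springframework.data.mongodb.core.aggregation; the projected field name "random" and the variable names are illustrative only):

    // projects a random float between 0 and 1 into the "random" field of each result document
    ProjectionOperation projection = Aggregation.project()
            .and(ArithmeticOperators.rand()).as("random");

    // renders: { "$project" : { "random" : { "$rand" : {} } } }
    Aggregation aggregation = Aggregation.newAggregation(projection);
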
--- .../mongodb/core/aggregation/ArithmeticOperators.java | 4 +++- .../core/aggregation/ArithmeticOperatorsUnitTests.java | 4 ++-- .../aggregation/SpelExpressionTransformerUnitTests.java | 8 +++++--- src/main/asciidoc/reference/aggregation-framework.adoc | 2 +- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 9610967830..8fe3d9120c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -40,6 +40,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Mushtaq Ahmed * @since 1.10 */ public class ArithmeticOperators { @@ -65,7 +66,8 @@ public static ArithmeticOperatorFactory valueOf(AggregationExpression expression } /** - * Creates new {@link AggregationExpression} that returns a random float between 0 and 1 each time it is called. + * Creates new {@link AggregationExpression} that returns a random float between {@code 0} and {@code 1} each time it + * is called. * * @return new instance of {@link Rand}. * @since 3.3 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index b589e152aa..02f76d5c10 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -29,6 +29,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Mushtaq Ahmed */ class ArithmeticOperatorsUnitTests { @@ -166,9 +167,8 @@ void rendersTanhWithValueInDegrees() { .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } - @Test // GH-3724 - void rendersRank() { + void rendersRand() { assertThat(rand().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rand", new Document())); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index cc20ffd121..daba7a21cd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1036,17 +1036,19 @@ void shouldRenderTanh() { @Test // GH-3713 void shouldRenderDateAdd() { - assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + assertThat(transform("dateAdd(purchaseDate, 'day', 3)")) + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); } @Test // GH-3713 void shouldRenderDateDiff() { - assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + 
assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")) + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); } @Test // GH-3724 void shouldRenderRand() { - assertThat(transform("rand()")).isEqualTo(Document.parse("{ $rand : {} }")); + assertThat(transform("rand()")).isEqualTo("{ $rand : {} }"); } private Document transform(String expression, Object... params) { diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index bc7a032e75..f96719adde 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -76,7 +76,7 @@ At the time of this writing, we provide support for the following Aggregation Op [cols="2*"] |=== | Pipeline Aggregation Operators -| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind` +| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `rand`, `replaceRoot`, `skip`, `sort`, `unwind` | Set Aggregation Operators | `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` From 302c8031f90f951fcae2e67b6d471f026f266089 Mon Sep 17 00:00:00 2001 From: sangyongchoi Date: Tue, 3 Aug 2021 23:23:59 +0900 Subject: [PATCH 065/885] Add Criteria infix functions for `maxDistance` and `minDistance`. Closes: #3761 --- .../core/query/TypedCriteriaExtensions.kt | 22 +++++++++ .../query/TypedCriteriaExtensionsTests.kt | 48 +++++++++++++++++++ 2 files changed, 70 insertions(+) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt index b8762ffbe1..eb1868e300 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt @@ -364,6 +364,28 @@ infix fun KProperty>.maxDistance(d: Double): Criteria = infix fun KProperty>.minDistance(d: Double): Criteria = Criteria(asString(this)).minDistance(d) +/** + * Creates a geo-spatial criterion using a $maxDistance operation, for use with $near + * + * See [MongoDB Query operator: + * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) + * @author Sangyong Choi + * @since 3.2 + * @see Criteria.maxDistance + */ +infix fun Criteria.maxDistance(d: Double): Criteria = + this.maxDistance(d) + +/** + * Creates a geospatial criterion using a $minDistance operation, for use with $near or + * $nearSphere. 
+ * @author Sangyong Choi + * @since 3.2 + * @see Criteria.minDistance + */ +infix fun Criteria.minDistance(d: Double): Criteria = + this.minDistance(d) + /** * Creates a criterion using the $elemMatch operator * diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt index 54969476d3..3b9dfc9342 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt @@ -317,6 +317,54 @@ class TypedCriteriaExtensionsTests { assertThat(typed).isEqualTo(expected) } + @Test + fun `maxDistance() should equal expected criteria with nearSphere`() { + val point = Point(0.0, 0.0) + + val typed = Building::location nearSphere point maxDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with nearSphere`() { + val point = Point(0.0, 0.0) + + val typed = Building::location nearSphere point minDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria with near`() { + val point = Point(0.0, 0.0) + + val typed = Building::location near point maxDistance 3.0 + val expected = Criteria("location") + .near(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with near`() { + val point = Point(0.0, 0.0) + + val typed = Building::location near point minDistance 3.0 + val expected = Criteria("location") + .near(point) + .minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + @Test fun `elemMatch() should equal expected criteria`() { From 467536cb34162f528ecba3d494e77414bb2cb333 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 14:33:06 +0200 Subject: [PATCH 066/885] Polishing. Update since version. Reformat code. See: #3761. --- .../mongodb/core/query/TypedCriteriaExtensions.kt | 4 ++-- .../core/query/TypedCriteriaExtensionsTests.kt | 11 +++++++---- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt index eb1868e300..ab7e32fc03 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt @@ -370,7 +370,7 @@ infix fun KProperty>.minDistance(d: Double): Criteria = * See [MongoDB Query operator: * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) * @author Sangyong Choi - * @since 3.2 + * @since 3.2.5 * @see Criteria.maxDistance */ infix fun Criteria.maxDistance(d: Double): Criteria = @@ -380,7 +380,7 @@ infix fun Criteria.maxDistance(d: Double): Criteria = * Creates a geospatial criterion using a $minDistance operation, for use with $near or * $nearSphere. 
* @author Sangyong Choi - * @since 3.2 + * @since 3.2.5 * @see Criteria.minDistance */ infix fun Criteria.minDistance(d: Double): Criteria = diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt index 3b9dfc9342..7a5c358fad 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt @@ -25,8 +25,11 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type import java.util.regex.Pattern /** + * Unit tests for [Criteria] extensions. + * * @author Tjeu Kayim * @author Mark Paluch + * @author Sangyong Choi */ class TypedCriteriaExtensionsTests { @@ -319,8 +322,8 @@ class TypedCriteriaExtensionsTests { @Test fun `maxDistance() should equal expected criteria with nearSphere`() { - val point = Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location nearSphere point maxDistance 3.0 val expected = Criteria("location") .nearSphere(point) @@ -331,8 +334,8 @@ class TypedCriteriaExtensionsTests { @Test fun `minDistance() should equal expected criteria with nearSphere`() { - val point = Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location nearSphere point minDistance 3.0 val expected = Criteria("location") .nearSphere(point) @@ -343,8 +346,8 @@ class TypedCriteriaExtensionsTests { @Test fun `maxDistance() should equal expected criteria with near`() { - val point = Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location near point maxDistance 3.0 val expected = Criteria("location") .near(point) @@ -355,8 +358,8 @@ class TypedCriteriaExtensionsTests { @Test fun `minDistance() should equal expected criteria with near`() { - val point = Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location near point minDistance 3.0 val expected = Criteria("location") .near(point) From 36e2d80d71634a134ed2c6d615be217692b59e9a Mon Sep 17 00:00:00 2001 From: Ivan Volzhev Date: Sat, 21 Aug 2021 08:24:22 +0200 Subject: [PATCH 067/885] Relax requirement for GeoJsonMultiPoint construction allowing creation using a single point. Only 1 point is required per GeoJson RFC and Mongo works just fine with 1 point as well. Closes #3776 Original pull request: #3777. --- .../mongodb/core/geo/GeoJsonMultiPoint.java | 18 ++++++++++++++++-- .../data/mongodb/core/geo/GeoJsonTests.java | 16 ++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java index f42d38e0dc..c1c80b89e8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java @@ -28,6 +28,7 @@ * {@link GeoJsonMultiPoint} is defined as list of {@link Point}s. * * @author Christoph Strobl + * @author Ivan Volzhev * @since 1.7 * @see https://geojson.org/geojson-spec.html#multipoint */ @@ -40,12 +41,12 @@ public class GeoJsonMultiPoint implements GeoJson> { /** * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s. 
* - * @param points points must not be {@literal null} and have at least 2 entries. + * @param points points must not be {@literal null} and have at least 1 entry. */ public GeoJsonMultiPoint(List points) { Assert.notNull(points, "Points must not be null."); - Assert.isTrue(points.size() >= 2, "Minimum of 2 Points required."); + Assert.isTrue(points.size() >= 1, "Minimum of 1 Point required."); this.points = new ArrayList(points); } @@ -69,6 +70,19 @@ public GeoJsonMultiPoint(Point first, Point second, Point... others) { this.points.addAll(Arrays.asList(others)); } + /** + * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. + * + * @param point must not be {@literal null}. + */ + public GeoJsonMultiPoint(Point point) { + + Assert.notNull(point, "First point must not be null!"); + + this.points = new ArrayList(); + this.points.add(point); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java index fa7115c098..6fa053dacd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java @@ -63,6 +63,7 @@ /** * @author Christoph Strobl * @author Mark Paluch + * @author Ivan Volzhev */ @ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration @@ -329,6 +330,21 @@ public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); } + @Test // DATAMONGO-3776 + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeWithOnePointCorrectly() { + + DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); + obj.id = "geoJsonMultiPoint"; + obj.geoJsonMultiPoint = new GeoJsonMultiPoint(new Point(0, 0)); + + template.save(obj); + + DocumentWithPropertyUsingGeoJsonType result = template.findOne(query(where("id").is(obj.id)), + DocumentWithPropertyUsingGeoJsonType.class); + + assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); + } + @Test // DATAMONGO-1137 public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPolygonTypeCorrectly() { From f71f1074455042e774030314e208950d9e570fad Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 14:57:02 +0200 Subject: [PATCH 068/885] Polishing. Reorder methods. Add since tag. Simplify assertions. Use diamond syntax. See: #3776 Original pull request: #3777. --- .../mongodb/core/geo/GeoJsonMultiPoint.java | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java index c1c80b89e8..30af9f7293 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java @@ -38,17 +38,31 @@ public class GeoJsonMultiPoint implements GeoJson> { private final List points; + /** + * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. + * + * @param point must not be {@literal null}. 
+ * @since 3.2.5 + */ + public GeoJsonMultiPoint(Point point) { + + Assert.notNull(point, "Point must not be null!"); + + this.points = new ArrayList<>(); + this.points.add(point); + } + /** * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s. * - * @param points points must not be {@literal null} and have at least 1 entry. + * @param points points must not be {@literal null} and not empty */ public GeoJsonMultiPoint(List points) { - Assert.notNull(points, "Points must not be null."); - Assert.isTrue(points.size() >= 1, "Minimum of 1 Point required."); + Assert.notNull(points, "Points must not be null!"); + Assert.notEmpty(points, "Points must contain at least one point!"); - this.points = new ArrayList(points); + this.points = new ArrayList<>(points); } /** @@ -64,25 +78,12 @@ public GeoJsonMultiPoint(Point first, Point second, Point... others) { Assert.notNull(second, "Second point must not be null!"); Assert.notNull(others, "Additional points must not be null!"); - this.points = new ArrayList(); + this.points = new ArrayList<>(); this.points.add(first); this.points.add(second); this.points.addAll(Arrays.asList(others)); } - /** - * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. - * - * @param point must not be {@literal null}. - */ - public GeoJsonMultiPoint(Point point) { - - Assert.notNull(point, "First point must not be null!"); - - this.points = new ArrayList(); - this.points.add(point); - } - /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() From 297ef9823920008245f6271acc1e1212991d89e0 Mon Sep 17 00:00:00 2001 From: divya srivastava Date: Mon, 23 Aug 2021 17:33:06 +0530 Subject: [PATCH 069/885] Add support for `$regexFind`, `$regexFindAll`, and `$regexMatch` aggregation operators. Closes #3725 Original pull request: #3781. --- .../core/aggregation/StringOperators.java | 438 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 3 + .../ProjectionOperationUnitTests.java | 27 ++ .../SpelExpressionTransformerUnitTests.java | 64 +++ .../aggregation/StringOperatorsUnitTests.java | 106 +++++ 5 files changed, 638 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java index 5a31f6b3fc..710c6c855e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java @@ -515,6 +515,120 @@ public RTrim rtrim(AggregationExpression expression) { private RTrim createRTrim() { return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression); } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find the document with the first match.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexFind(String regex) { + return createRegexFind().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find the document with the first match.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexFind(AggregationExpression expression) { + return createRegexFind().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find the document with the first match. + * + * @param regex the regular expression to apply + * @param options the options to use + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexFind(String regex,String options) { + return createRegexFind().regex(regex).options(options); + } + + private RegexFind createRegexFind() { + return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find all the documents with the match.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexFindAll(String regex) { + return createRegexFindAll().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find all the documents with the match..
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexFindAll(AggregationExpression expression) { + return createRegexFindAll().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find all the documents with the match.. + * + * @param regex the regular expression to apply + * @param options the options to use + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexFindAll(String regex,String options) { + return createRegexFindAll().regex(regex).options(options); + } + + private RegexFindAll createRegexFindAll() { + return usesFieldRef() ? RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find if a match is found or not.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexMatch(String regex) { + return createRegexMatch().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find if a match is found or not.
+ * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexMatch(AggregationExpression expression) { + return createRegexMatch().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find if a match is found or not. + * + * @param regex the regular expression to apply + * @param options the options to use + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexMatch(String regex,String options) { + return createRegexMatch().regex(regex).options(options); + } + + private RegexMatch createRegexMatch() { + return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression); + } private boolean usesFieldRef() { return fieldReference != null; @@ -1477,4 +1591,328 @@ protected String getMongoMethod() { return "$rtrim"; } } + + /** + * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and + * returns information on the first matched substring.
+ * NOTE: Requires MongoDB 4.0 or later. + * + */ + public static class RegexFind extends AbstractAggregationExpression { + + protected RegexFind(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$regexFind"; + } + + /** + * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFind(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(append("options", expression)); + } + + /** + * Optional specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); + return new RegexFind(append("regex",regex)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFind(append("regex",Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. 
+ */ + public RegexFind regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(append("regex",expression)); + } + + } + + /** + * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and + * returns information on all the matched substrings.
+ * NOTE: Requires MongoDB 4.0 or later. + * + */ + public static class RegexFindAll extends AbstractAggregationExpression { + + protected RegexFindAll(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$regexFindAll"; + } + + /** + * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFindAll(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFindAll(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(append("options", expression)); + } + + /** + * Optional specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); + return new RegexFindAll(append("regex",regex)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFindAll(append("regex",Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. 
+ */ + public RegexFindAll regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(append("regex",expression)); + } + + } + + /** + * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and + * returns a boolean that indicates if a match is found or not.
+ * NOTE: Requires MongoDB 4.0 or later. + * + */ + public static class RegexMatch extends AbstractAggregationExpression { + + protected RegexMatch(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$regexMatch"; + } + + /** + * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexMatch(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(append("options", expression)); + } + + /** + * Optional specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); + return new RegexMatch(append("regex",regex)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(append("regex",Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. 
+ */ + public RegexMatch regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(append("regex",expression)); + } + + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index a91358353c..0fbfe51f09 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -116,6 +116,9 @@ public class MethodReferenceNode extends ExpressionNode { map.put("trim", mapArgRef().forOperator("$trim").mappingParametersTo("input", "chars")); map.put("ltrim", mapArgRef().forOperator("$ltrim").mappingParametersTo("input", "chars")); map.put("rtrim", mapArgRef().forOperator("$rtrim").mappingParametersTo("input", "chars")); + map.put("regexFind", mapArgRef().forOperator("$regexFind").mappingParametersTo("input", "regex" , "options")); + map.put("regexFindAll", mapArgRef().forOperator("$regexFindAll").mappingParametersTo("input", "regex" , "options")); + map.put("regexMatch", mapArgRef().forOperator("$regexMatch").mappingParametersTo("input", "regex" , "options")); // TEXT SEARCH OPERATORS map.put("meta", singleArgRef().forOperator("$meta")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java index 805fc10f38..ff6771d9f1 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java @@ -1766,6 +1766,33 @@ public void shouldRenderSubstrCPCorrectly() { assertThat(agg) .isEqualTo(Document.parse("{ $project : { yearSubstring: { $substrCP: [ \"$quarter\", 0, 2 ] } } }")); } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFind("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFind: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFindAll("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFindAll: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexMatch("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexMatch: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } @Test // DATAMONGO-1548 public void shouldRenderIndexOfArrayCorrectly() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index daba7a21cd..41b0323636 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -800,6 +800,70 @@ void shouldRenderRtrimWithCharsFromFieldReference() { assertThat(transform("rtrim(field1, field2)")) .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindWithoutOptions() { + + assertThat(transform("regexFind(field1,'e')")) + .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindWithOptions() { + + assertThat(transform("regexFind(field1,'e','i')")) + .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindWithOptionsFromFieldReference() { + + assertThat(transform("regexFind(field1,'e',field2)")) + .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindAllWithoutOptions() { + + assertThat(transform("regexFindAll(field1,'e')")) + .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindAllWithOptions() { + + assertThat(transform("regexFindAll(field1,'e','i')")) + .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindAllWithOptionsFromFieldReference() { + + assertThat(transform("regexFindAll(field1,'e',field2)")) + .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexMatchWithoutOptions() { + + assertThat(transform("regexMatch(field1,'e')")) + .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexMatchWithOptions() { + + assertThat(transform("regexMatch(field1,'e','i')")) + .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexMatchWithOptionsFromFieldReference() { + + assertThat(transform("regexMatch(field1,'e',field2)")) + .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + } + @Test // DATAMONGO-2077 void shouldRenderConvertWithoutOptionalParameters() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java index 0dbe362ae4..cdd0b38dbc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java @@ -144,5 +144,111 @@ public void shouldRenderRTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").rtrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAll() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatch() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" 
: \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFind() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + } + } From 69b582823a53cf3a1b8a07354da942bd94432f4f Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 26 Aug 2021 12:20:11 +0200 Subject: [PATCH 070/885] Polishing. Add support for Pattern. Extract Regex flags translation from Criteria into RegexFlags utility class. Add since and author tags. Simplify tests. Update reference documentation. See #3725. Original pull request: #3781. 
--- .../core/aggregation/StringOperators.java | 371 ++++++++++----- .../data/mongodb/core/query/Criteria.java | 58 +-- .../data/mongodb/util/RegexFlags.java | 116 +++++ .../ProjectionOperationUnitTests.java | 433 +++++++++--------- .../SpelExpressionTransformerUnitTests.java | 89 ++-- .../aggregation/StringOperatorsUnitTests.java | 226 +++++---- .../reference/aggregation-framework.adoc | 2 +- 7 files changed, 776 insertions(+), 519 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java index 710c6c855e..8b6bb03875 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java @@ -18,8 +18,11 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; import org.springframework.data.domain.Range; +import org.springframework.data.mongodb.util.RegexFlags; import org.springframework.util.Assert; /** @@ -27,6 +30,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava * @since 1.10 */ public class StringOperators { @@ -515,117 +519,170 @@ public RTrim rtrim(AggregationExpression expression) { private RTrim createRTrim() { return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given * regular expression to find the document with the first match.
* NOTE: Requires MongoDB 4.0 or later. * + * @param regex must not be {@literal null}. * @return new instance of {@link RegexFind}. + * @since 3.3 */ public RegexFind regexFind(String regex) { return createRegexFind().regex(regex); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular - * expression resulting from the given {@link AggregationExpression} to find the document with the first match.
+ * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find the document with the first + * match.
* NOTE: Requires MongoDB 4.0 or later. * + * @param expression must not be {@literal null}. * @return new instance of {@link RegexFind}. + * @since 3.3 */ public RegexFind regexFind(AggregationExpression expression) { return createRegexFind().regexOf(expression); } - + + /** + * Creates new {@link AggregationExpression} that takes the {@link Pattern} and applies the regular expression with + * the options specified in the argument to find the document with the first match. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(Pattern pattern) { + return createRegexFind().pattern(pattern); + } + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular * expression with the options specified in the argument to find the document with the first match. * - * @param regex the regular expression to apply - * @param options the options to use + * @param regex the regular expression to apply. + * @param options the options to use. * @return new instance of {@link RegexFind}. + * @since 3.3 */ - public RegexFind regexFind(String regex,String options) { + public RegexFind regexFind(String regex, String options) { return createRegexFind().regex(regex).options(options); } - + private RegexFind createRegexFind() { return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given * regular expression to find all the documents with the match.
* NOTE: Requires MongoDB 4.0 or later. * + * @param regex must not be {@literal null}. * @return new instance of {@link RegexFindAll}. + * @since 3.3 */ public RegexFindAll regexFindAll(String regex) { return createRegexFindAll().regex(regex); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular - * expression resulting from the given {@link AggregationExpression} to find all the documents with the match..
+ * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find all the documents with the + * match..
* NOTE: Requires MongoDB 4.0 or later. * + * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. + * @since 3.3 */ public RegexFindAll regexFindAll(AggregationExpression expression) { return createRegexFindAll().regexOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular - * expression with the options specified in the argument to find all the documents with the match.. + * Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with + * the options specified in the argument to find all the documents with the match. * - * @param regex the regular expression to apply - * @param options the options to use + * @param pattern the pattern object to apply. * @return new instance of {@link RegexFindAll}. + * @since 3.3 */ - public RegexFindAll regexFindAll(String regex,String options) { + public RegexFindAll regexFindAll(Pattern pattern) { + return createRegexFindAll().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find all the documents with the match. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(String regex, String options) { return createRegexFindAll().regex(regex).options(options); } - + private RegexFindAll createRegexFindAll() { return usesFieldRef() ? RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given * regular expression to find if a match is found or not.
* NOTE: Requires MongoDB 4.0 or later. * + * @param regex must not be {@literal null}. * @return new instance of {@link RegexMatch}. + * @since 3.3 */ public RegexMatch regexMatch(String regex) { return createRegexMatch().regex(regex); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular * expression resulting from the given {@link AggregationExpression} to find if a match is found or not.
* NOTE: Requires MongoDB 4.0 or later. * + * @param expression must not be {@literal null}. * @return new instance of {@link RegexMatch}. + * @since 3.3 */ public RegexMatch regexMatch(AggregationExpression expression) { return createRegexMatch().regexOf(expression); } - + + /** + * Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with + * the options specified in the argument to find if a match is found or not. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(Pattern pattern) { + return createRegexMatch().pattern(pattern); + } + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular * expression with the options specified in the argument to find if a match is found or not. * - * @param regex the regular expression to apply - * @param options the options to use + * @param regex the regular expression to apply. + * @param options the options to use. * @return new instance of {@link RegexMatch}. + * @since 3.3 */ - public RegexMatch regexMatch(String regex,String options) { + public RegexMatch regexMatch(String regex, String options) { return createRegexMatch().regex(regex).options(options); } - + private RegexMatch createRegexMatch() { return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression); } @@ -1591,35 +1648,35 @@ protected String getMongoMethod() { return "$rtrim"; } } - + /** - * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and + * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and * returns information on the first matched substring.
* NOTE: Requires MongoDB 4.0 or later. * + * @author Divya Srivastava + * @since 3.3 */ public static class RegexFind extends AbstractAggregationExpression { - + protected RegexFind(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$regexFind"; - } - /** - * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public static RegexFind valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference))); } - + /** * Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input} * value. @@ -1628,10 +1685,12 @@ public static RegexFind valueOf(String fieldReference) { * @return new instance of {@link RegexFind}. */ public static RegexFind valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(Collections.singletonMap("input", expression)); } - + /** * Optional specify the options to use with the regular expression. * @@ -1639,72 +1698,108 @@ public static RegexFind valueOf(AggregationExpression expression) { * @return new instance of {@link RegexFind}. */ public RegexFind options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFind(append("options", options)); } - + /** - * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(append("options", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(append("options", expression)); } - + /** - * Optional specify the regular expression to apply. + * Specify the regular expression to apply. * * @param regex must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); - return new RegexFind(append("regex",regex)); + + return new RegexFind(append("regex", regex)); } - + /** - * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFind}. 
+ */ + public RegexFind pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null!"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFind(regex); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); - return new RegexFind(append("regex",Fields.field(fieldReference))); + + return new RegexFind(append("regex", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new RegexFind(append("regex",expression)); + + return new RegexFind(append("regex", expression)); } + @Override + protected String getMongoMethod() { + return "$regexFind"; + } } - + /** - * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and + * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and * returns information on all the matched substrings.
* NOTE: Requires MongoDB 4.0 or later. * + * @author Divya Srivastava + * @since 3.3 */ public static class RegexFindAll extends AbstractAggregationExpression { @@ -1712,13 +1807,9 @@ protected RegexFindAll(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$regexFindAll"; - } - /** - * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFindAll}. @@ -1727,19 +1818,21 @@ public static RegexFindAll valueOf(String fieldReference) { Assert.notNull(fieldReference, "FieldReference must not be null!"); return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference))); } - + /** - * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as {@literal input} - * value. + * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as + * {@literal input} value. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public static RegexFindAll valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(Collections.singletonMap("input", expression)); } - + /** * Optional specify the options to use with the regular expression. * @@ -1747,72 +1840,108 @@ public static RegexFindAll valueOf(AggregationExpression expression) { * @return new instance of {@link RegexFindAll}. */ public RegexFindAll options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFindAll(append("options", options)); } - + /** - * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFindAll(append("options", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(append("options", expression)); } - + /** - * Optional specify the regular expression to apply. + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. 
+ */ + public RegexFindAll pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null!"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFindAll(regex); + } + + /** + * Specify the regular expression to apply. * * @param regex must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); - return new RegexFindAll(append("regex",regex)); + + return new RegexFindAll(append("regex", regex)); } - + /** - * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * Specify the reference to the {@link Field field} holding the regular expression to apply. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); - return new RegexFindAll(append("regex",Fields.field(fieldReference))); + + return new RegexFindAll(append("regex", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new RegexFindAll(append("regex",expression)); + + return new RegexFindAll(append("regex", expression)); } + @Override + protected String getMongoMethod() { + return "$regexFindAll"; + } } - + /** - * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and + * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and * returns a boolean that indicates if a match is found or not.
* NOTE: Requires MongoDB 4.0 or later. * + * @author Divya Srivastava + * @since 3.3 */ public static class RegexMatch extends AbstractAggregationExpression { @@ -1820,22 +1949,20 @@ protected RegexMatch(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$regexMatch"; - } - /** - * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public static RegexMatch valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference))); } - + /** * Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input} * value. @@ -1844,10 +1971,12 @@ public static RegexMatch valueOf(String fieldReference) { * @return new instance of {@link RegexMatch}. */ public static RegexMatch valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(Collections.singletonMap("input", expression)); } - + /** * Optional specify the options to use with the regular expression. * @@ -1855,54 +1984,82 @@ public static RegexMatch valueOf(AggregationExpression expression) { * @return new instance of {@link RegexMatch}. */ public RegexMatch options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexMatch(append("options", options)); } - + /** - * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public RegexMatch optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(append("options", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public RegexMatch optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(append("options", expression)); } - + /** - * Optional specify the regular expression to apply. + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null!"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexMatch(regex); + } + + /** + * Specify the regular expression to apply. * * @param regex must not be {@literal null}. * @return new instance of {@link RegexMatch}. 
*/ public RegexMatch regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); - return new RegexMatch(append("regex",regex)); + + return new RegexMatch(append("regex", regex)); } - + /** - * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * Specify the reference to the {@link Field field} holding the regular expression to apply. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public RegexMatch regexOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new RegexMatch(append("regex",Fields.field(fieldReference))); + + return new RegexMatch(append("regex", Fields.field(fieldReference))); } - + /** * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. * @@ -1910,9 +2067,15 @@ public RegexMatch regexOf(String fieldReference) { * @return new instance of {@link RegexMatch}. */ public RegexMatch regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new RegexMatch(append("regex",expression)); + + return new RegexMatch(append("regex", expression)); } + @Override + protected String getMongoMethod() { + return "$regexMatch"; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index 9b1e8df940..f9a354c38f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -42,6 +42,7 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.util.RegexFlags; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.Base64Utils; @@ -71,20 +72,6 @@ public class Criteria implements CriteriaDefinition { */ private static final Object NOT_SET = new Object(); - private static final int[] FLAG_LOOKUP = new int[Character.MAX_VALUE]; - - static { - FLAG_LOOKUP['g'] = 256; - FLAG_LOOKUP['i'] = Pattern.CASE_INSENSITIVE; - FLAG_LOOKUP['m'] = Pattern.MULTILINE; - FLAG_LOOKUP['s'] = Pattern.DOTALL; - FLAG_LOOKUP['c'] = Pattern.CANON_EQ; - FLAG_LOOKUP['x'] = Pattern.COMMENTS; - FLAG_LOOKUP['d'] = Pattern.UNIX_LINES; - FLAG_LOOKUP['t'] = Pattern.LITERAL; - FLAG_LOOKUP['u'] = Pattern.UNICODE_CASE; - } - private @Nullable String key; private List criteriaChain; private LinkedHashMap criteria = new LinkedHashMap(); @@ -530,7 +517,7 @@ private Pattern toPattern(String regex, @Nullable String options) { Assert.notNull(regex, "Regex string must not be null!"); - return Pattern.compile(regex, regexFlags(options)); + return Pattern.compile(regex, RegexFlags.toRegexFlags(options)); } /** @@ -1099,47 +1086,6 @@ private static boolean requiresGeoJsonFormat(Object value) { || (value instanceof GeoCommand && ((GeoCommand) value).getShape() instanceof GeoJson); } - /** - * Lookup the MongoDB specific flags for a given regex option string. - * - * @param s the Regex option/flag to look up. Can be {@literal null}. - * @return zero if given {@link String} is {@literal null} or empty. 
- * @since 2.2 - */ - private static int regexFlags(@Nullable String s) { - - int flags = 0; - - if (s == null) { - return flags; - } - - for (final char f : s.toLowerCase().toCharArray()) { - flags |= regexFlag(f); - } - - return flags; - } - - /** - * Lookup the MongoDB specific flags for a given character. - * - * @param c the Regex option/flag to look up. - * @return - * @throws IllegalArgumentException for unknown flags - * @since 2.2 - */ - private static int regexFlag(char c) { - - int flag = FLAG_LOOKUP[c]; - - if (flag == 0) { - throw new IllegalArgumentException(String.format("Unrecognized flag [%c]", c)); - } - - return flag; - } - /** * MongoDB specific bitwise query * operators like {@code $bitsAllClear, $bitsAllSet,...} for usage with {@link Criteria#bits()} and {@link Query}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java new file mode 100644 index 0000000000..dfee94954c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java @@ -0,0 +1,116 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.util.regex.Pattern; + +import org.springframework.lang.Nullable; + +/** + * Utility to translate {@link Pattern#flags() regex flags} to MongoDB regex options and vice versa. + * + * @author Mark Paluch + * @since 3.3 + */ +public abstract class RegexFlags { + + private static final int[] FLAG_LOOKUP = new int[Character.MAX_VALUE]; + + static { + FLAG_LOOKUP['g'] = 256; + FLAG_LOOKUP['i'] = Pattern.CASE_INSENSITIVE; + FLAG_LOOKUP['m'] = Pattern.MULTILINE; + FLAG_LOOKUP['s'] = Pattern.DOTALL; + FLAG_LOOKUP['c'] = Pattern.CANON_EQ; + FLAG_LOOKUP['x'] = Pattern.COMMENTS; + FLAG_LOOKUP['d'] = Pattern.UNIX_LINES; + FLAG_LOOKUP['t'] = Pattern.LITERAL; + FLAG_LOOKUP['u'] = Pattern.UNICODE_CASE; + } + + private RegexFlags() { + + } + + /** + * Lookup the MongoDB specific options from given {@link Pattern#flags() flags}. + * + * @param flags the Regex flags to look up. + * @return the options string. May be empty. + */ + public static String toRegexOptions(int flags) { + + if (flags == 0) { + return ""; + } + + StringBuilder buf = new StringBuilder(); + + for (int i = 'a'; i < 'z'; i++) { + + if (FLAG_LOOKUP[i] == 0) { + continue; + } + + if ((flags & FLAG_LOOKUP[i]) > 0) { + buf.append((char) i); + } + } + + return buf.toString(); + } + + /** + * Lookup the MongoDB specific flags for a given regex option string. + * + * @param s the Regex option/flag to look up. Can be {@literal null}. + * @return zero if given {@link String} is {@literal null} or empty. 
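+	 * <p>
+	 * For example, {@code toRegexFlags("im")} is expected to yield
+	 * {@code Pattern.CASE_INSENSITIVE | Pattern.MULTILINE}, matching the {@code FLAG_LOOKUP} table above.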
+ * @since 2.2 + */ + public static int toRegexFlags(@Nullable String s) { + + int flags = 0; + + if (s == null) { + return flags; + } + + for (char f : s.toLowerCase().toCharArray()) { + flags |= toRegexFlag(f); + } + + return flags; + } + + /** + * Lookup the MongoDB specific flags for a given character. + * + * @param c the Regex option/flag to look up. + * @return + * @throws IllegalArgumentException for unknown flags + * @since 2.2 + */ + public static int toRegexFlag(char c) { + + int flag = FLAG_LOOKUP[c]; + + if (flag == 0) { + throw new IllegalArgumentException(String.format("Unrecognized flag [%c]", c)); + } + + return flag; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java index ff6771d9f1..9ef207c9ad 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java @@ -55,24 +55,25 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Divya Srivastava * @author Mark Paluch */ public class ProjectionOperationUnitTests { - static final String MOD = "$mod"; - static final String ADD = "$add"; - static final String SUBTRACT = "$subtract"; - static final String MULTIPLY = "$multiply"; - static final String DIVIDE = "$divide"; - static final String PROJECT = "$project"; + private static final String MOD = "$mod"; + private static final String ADD = "$add"; + private static final String SUBTRACT = "$subtract"; + private static final String MULTIPLY = "$multiply"; + private static final String DIVIDE = "$divide"; + private static final String PROJECT = "$project"; @Test // DATAMONGO-586 - public void rejectsNullFields() { + void rejectsNullFields() { assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation((Fields) null)); } @Test // DATAMONGO-586 - public void declaresBackReferenceCorrectly() { + void declaresBackReferenceCorrectly() { ProjectionOperation operation = new ProjectionOperation(); operation = operation.and("prop").previousOperation(); @@ -83,7 +84,7 @@ public void declaresBackReferenceCorrectly() { } @Test // DATAMONGO-586 - public void alwaysUsesExplicitReference() { + void alwaysUsesExplicitReference() { ProjectionOperation operation = new ProjectionOperation(Fields.fields("foo").and("bar", "foobar")); @@ -95,7 +96,7 @@ public void alwaysUsesExplicitReference() { } @Test // DATAMONGO-586 - public void aliasesSimpleFieldProjection() { + void aliasesSimpleFieldProjection() { ProjectionOperation operation = new ProjectionOperation(); @@ -106,7 +107,7 @@ public void aliasesSimpleFieldProjection() { } @Test // DATAMONGO-586 - public void aliasesArithmeticProjection() { + void aliasesArithmeticProjection() { ProjectionOperation operation = new ProjectionOperation(); @@ -121,7 +122,7 @@ public void aliasesArithmeticProjection() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationWithoutAlias() { + void arithmeticProjectionOperationWithoutAlias() { String fieldName = "a"; ProjectionOperationBuilder operation = new ProjectionOperation().and(fieldName).plus(1); @@ -134,7 +135,7 @@ public void arithmeticProjectionOperationWithoutAlias() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationPlus() { + 
void arithmeticProjectionOperationPlus() { String fieldName = "a"; String fieldAlias = "b"; @@ -148,7 +149,7 @@ public void arithmeticProjectionOperationPlus() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMinus() { + void arithmeticProjectionOperationMinus() { String fieldName = "a"; String fieldAlias = "b"; @@ -162,7 +163,7 @@ public void arithmeticProjectionOperationMinus() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMultiply() { + void arithmeticProjectionOperationMultiply() { String fieldName = "a"; String fieldAlias = "b"; @@ -176,7 +177,7 @@ public void arithmeticProjectionOperationMultiply() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationDivide() { + void arithmeticProjectionOperationDivide() { String fieldName = "a"; String fieldAlias = "b"; @@ -190,12 +191,12 @@ public void arithmeticProjectionOperationDivide() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationDivideByZeroException() { + void arithmeticProjectionOperationDivideByZeroException() { assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").divide(0)); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMod() { + void arithmeticProjectionOperationMod() { String fieldName = "a"; String fieldAlias = "b"; @@ -209,7 +210,7 @@ public void arithmeticProjectionOperationMod() { } @Test // DATAMONGO-758, DATAMONGO-1893 - public void excludeShouldAllowExclusionOfFieldsOtherThanUnderscoreId/* since MongoDB 3.4 */() { + void excludeShouldAllowExclusionOfFieldsOtherThanUnderscoreId/* since MongoDB 3.4 */() { ProjectionOperation projectionOp = new ProjectionOperation().andExclude("foo"); Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -220,7 +221,7 @@ public void arithmeticProjectionOperationMod() { } @Test // DATAMONGO-1893 - public void includeShouldNotInheritFields() { + void includeShouldNotInheritFields() { ProjectionOperation projectionOp = new ProjectionOperation().andInclude("foo"); @@ -228,7 +229,7 @@ public void includeShouldNotInheritFields() { } @Test // DATAMONGO-758 - public void excludeShouldAllowExclusionOfUnderscoreId() { + void excludeShouldAllowExclusionOfUnderscoreId() { ProjectionOperation projectionOp = new ProjectionOperation().andExclude(Fields.UNDERSCORE_ID); Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -237,7 +238,7 @@ public void excludeShouldAllowExclusionOfUnderscoreId() { } @Test // DATAMONGO-1906 - public void rendersConditionalProjectionCorrectly() { + void rendersConditionalProjectionCorrectly() { TypedAggregation aggregation = Aggregation.newAggregation(Book.class, Aggregation.project("title") @@ -252,7 +253,7 @@ public void rendersConditionalProjectionCorrectly() { } @Test // DATAMONGO-757 - public void usesImplictAndExplicitFieldAliasAndIncludeExclude() { + void usesImplictAndExplicitFieldAliasAndIncludeExclude() { ProjectionOperation operation = Aggregation.project("foo").and("foobar").as("bar").andInclude("inc1", "inc2") .andExclude("_id"); @@ -268,12 +269,12 @@ public void usesImplictAndExplicitFieldAliasAndIncludeExclude() { } @Test - public void arithmeticProjectionOperationModByZeroException() { + void arithmeticProjectionOperationModByZeroException() { assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").mod(0)); } @Test // DATAMONGO-769 - public void allowArithmeticOperationsWithFieldReferences() { + void allowArithmeticOperationsWithFieldReferences() { 
ProjectionOperation operation = Aggregation.project() // .and("foo").plus("bar").as("fooPlusBar") // @@ -298,7 +299,7 @@ public void allowArithmeticOperationsWithFieldReferences() { } @Test // DATAMONGO-774 - public void projectionExpressions() { + void projectionExpressions() { ProjectionOperation operation = Aggregation.project() // .andExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // @@ -310,7 +311,7 @@ public void projectionExpressions() { } @Test // DATAMONGO-975 - public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { + void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { ProjectionOperation operation = Aggregation.project() // .and("date").extractHour().as("hour") // @@ -343,7 +344,7 @@ public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorr } @Test // DATAMONGO-975 - public void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { + void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project() // .andExpression("date + 86400000") // @@ -360,7 +361,7 @@ public void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorre } @Test // DATAMONGO-979 - public void shouldRenderSizeExpressionInProjection() { + void shouldRenderSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // @@ -375,7 +376,7 @@ public void shouldRenderSizeExpressionInProjection() { } @Test // DATAMONGO-979 - public void shouldRenderGenericSizeExpressionInProjection() { + void shouldRenderGenericSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // @@ -389,7 +390,7 @@ public void shouldRenderGenericSizeExpressionInProjection() { } @Test // DATAMONGO-1457 - public void shouldRenderSliceCorrectly() throws Exception { + void shouldRenderSliceCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project().and("field").slice(10).as("renamed"); @@ -400,7 +401,7 @@ public void shouldRenderSliceCorrectly() throws Exception { } @Test // DATAMONGO-1457 - public void shouldRenderSliceWithPositionCorrectly() throws Exception { + void shouldRenderSliceWithPositionCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project().and("field").slice(10, 5).as("renamed"); @@ -411,7 +412,7 @@ public void shouldRenderSliceWithPositionCorrectly() throws Exception { } @Test // DATAMONGO-784 - public void shouldRenderCmpCorrectly() { + void shouldRenderCmpCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").cmp(10).as("cmp10"); @@ -420,7 +421,7 @@ public void shouldRenderCmpCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderEqCorrectly() { + void shouldRenderEqCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").eq(10).as("eq10"); @@ -429,7 +430,7 @@ public void shouldRenderEqCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderGtCorrectly() { + void shouldRenderGtCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").gt(10).as("gt10"); @@ -438,7 +439,7 @@ public void shouldRenderGtCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderGteCorrectly() { + void shouldRenderGteCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").gte(10).as("gte10"); @@ -447,7 +448,7 @@ public void shouldRenderGteCorrectly() { } @Test // 
DATAMONGO-784 - public void shouldRenderLtCorrectly() { + void shouldRenderLtCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").lt(10).as("lt10"); @@ -456,7 +457,7 @@ public void shouldRenderLtCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderLteCorrectly() { + void shouldRenderLteCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").lte(10).as("lte10"); @@ -465,7 +466,7 @@ public void shouldRenderLteCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderNeCorrectly() { + void shouldRenderNeCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").ne(10).as("ne10"); @@ -474,7 +475,7 @@ public void shouldRenderNeCorrectly() { } @Test // DATAMONGO-1536 - public void shouldRenderSetEquals() { + void shouldRenderSetEquals() { Document agg = project("A", "B").and("A").equalsArrays("B").as("sameElements") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -484,7 +485,7 @@ public void shouldRenderSetEquals() { } @Test // DATAMONGO-1536 - public void shouldRenderSetEqualsAggregationExpresssion() { + void shouldRenderSetEqualsAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isEqualTo("B")).as("sameElements") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -494,7 +495,7 @@ public void shouldRenderSetEqualsAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIntersection() { + void shouldRenderSetIntersection() { Document agg = project("A", "B").and("A").intersectsArrays("B").as("commonToBoth") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -504,7 +505,7 @@ public void shouldRenderSetIntersection() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIntersectionAggregationExpresssion() { + void shouldRenderSetIntersectionAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").intersects("B")).as("commonToBoth") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -514,7 +515,7 @@ public void shouldRenderSetIntersectionAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetUnion() { + void shouldRenderSetUnion() { Document agg = project("A", "B").and("A").unionArrays("B").as("allValues").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -523,7 +524,7 @@ public void shouldRenderSetUnion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetUnionAggregationExpresssion() { + void shouldRenderSetUnionAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").union("B")).as("allValues") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -533,7 +534,7 @@ public void shouldRenderSetUnionAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetDifference() { + void shouldRenderSetDifference() { Document agg = project("A", "B").and("B").differenceToArray("A").as("inBOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -543,7 +544,7 @@ public void shouldRenderSetDifference() { } @Test // DATAMONGO-1536 - public void shouldRenderSetDifferenceAggregationExpresssion() { + void shouldRenderSetDifferenceAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("B").differenceTo("A")).as("inBOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -553,7 +554,7 @@ public void shouldRenderSetDifferenceAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIsSubset() { + void shouldRenderSetIsSubset() { Document agg = project("A", "B").and("A").subsetOfArray("B").as("aIsSubsetOfB") 
.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -563,7 +564,7 @@ public void shouldRenderSetIsSubset() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIsSubsetAggregationExpresssion() { + void shouldRenderSetIsSubsetAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isSubsetOf("B")).as("aIsSubsetOfB") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -573,7 +574,7 @@ public void shouldRenderSetIsSubsetAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAnyElementTrue() { + void shouldRenderAnyElementTrue() { Document agg = project("responses").and("responses").anyElementInArrayTrue().as("isAnyTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -583,7 +584,7 @@ public void shouldRenderAnyElementTrue() { } @Test // DATAMONGO-1536 - public void shouldRenderAnyElementTrueAggregationExpresssion() { + void shouldRenderAnyElementTrueAggregationExpresssion() { Document agg = project("responses").and(SetOperators.arrayAsSet("responses").anyElementTrue()).as("isAnyTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -593,7 +594,7 @@ public void shouldRenderAnyElementTrueAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAllElementsTrue() { + void shouldRenderAllElementsTrue() { Document agg = project("responses").and("responses").allElementsInArrayTrue().as("isAllTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -603,7 +604,7 @@ public void shouldRenderAllElementsTrue() { } @Test // DATAMONGO-1536 - public void shouldRenderAllElementsTrueAggregationExpresssion() { + void shouldRenderAllElementsTrueAggregationExpresssion() { Document agg = project("responses").and(SetOperators.arrayAsSet("responses").allElementsTrue()).as("isAllTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -613,7 +614,7 @@ public void shouldRenderAllElementsTrueAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAbs() { + void shouldRenderAbs() { Document agg = project().and("anyNumber").absoluteValue().as("absoluteValue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -622,7 +623,7 @@ public void shouldRenderAbs() { } @Test // DATAMONGO-1536 - public void shouldRenderAbsAggregationExpresssion() { + void shouldRenderAbsAggregationExpresssion() { Document agg = project() .and( @@ -634,7 +635,7 @@ public void shouldRenderAbsAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAddAggregationExpresssion() { + void shouldRenderAddAggregationExpresssion() { Document agg = project().and(ArithmeticOperators.valueOf("price").add("fee")).as("total") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -643,7 +644,7 @@ public void shouldRenderAddAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderCeil() { + void shouldRenderCeil() { Document agg = project().and("anyNumber").ceil().as("ceilValue").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -651,7 +652,7 @@ public void shouldRenderCeil() { } @Test // DATAMONGO-1536 - public void shouldRenderCeilAggregationExpresssion() { + void shouldRenderCeilAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).ceil()) @@ -662,7 +663,7 @@ public void shouldRenderCeilAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderDivide() { + void shouldRenderDivide() { Document agg = project().and("value") .divide(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).as("result") @@ -673,7 +674,7 @@ 
public void shouldRenderDivide() { } @Test // DATAMONGO-1536 - public void shouldRenderDivideAggregationExpresssion() { + void shouldRenderDivideAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf("anyNumber") @@ -685,7 +686,7 @@ public void shouldRenderDivideAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderExp() { + void shouldRenderExp() { Document agg = project().and("value").exp().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -693,7 +694,7 @@ public void shouldRenderExp() { } @Test // DATAMONGO-1536 - public void shouldRenderExpAggregationExpresssion() { + void shouldRenderExpAggregationExpresssion() { Document agg = project() .and( @@ -705,7 +706,7 @@ public void shouldRenderExpAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderFloor() { + void shouldRenderFloor() { Document agg = project().and("value").floor().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -713,7 +714,7 @@ public void shouldRenderFloor() { } @Test // DATAMONGO-1536 - public void shouldRenderFloorAggregationExpresssion() { + void shouldRenderFloorAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).floor()) @@ -724,7 +725,7 @@ public void shouldRenderFloorAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderLn() { + void shouldRenderLn() { Document agg = project().and("value").ln().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -732,7 +733,7 @@ public void shouldRenderLn() { } @Test // DATAMONGO-1536 - public void shouldRenderLnAggregationExpresssion() { + void shouldRenderLnAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).ln()) @@ -743,7 +744,7 @@ public void shouldRenderLnAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderLog() { + void shouldRenderLog() { Document agg = project().and("value").log(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -751,7 +752,7 @@ public void shouldRenderLog() { } @Test // DATAMONGO-1536 - public void shouldRenderLogAggregationExpresssion() { + void shouldRenderLogAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).log(2)) @@ -762,7 +763,7 @@ public void shouldRenderLogAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderLog10() { + void shouldRenderLog10() { Document agg = project().and("value").log10().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -770,7 +771,7 @@ public void shouldRenderLog10() { } @Test // DATAMONGO-1536 - public void shouldRenderLog10AggregationExpresssion() { + void shouldRenderLog10AggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).log10()) @@ -781,7 +782,7 @@ public void shouldRenderLog10AggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderMod() { + void shouldRenderMod() { Document agg = project().and("value").mod(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -791,7 +792,7 @@ public void shouldRenderMod() { } @Test // DATAMONGO-1536 - public void shouldRenderModAggregationExpresssion() { + void 
shouldRenderModAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).mod(2)) @@ -802,7 +803,7 @@ public void shouldRenderModAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderMultiply() { + void shouldRenderMultiply() { Document agg = project().and("value") .multiply(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).as("result") @@ -813,7 +814,7 @@ public void shouldRenderMultiply() { } @Test // DATAMONGO-1536 - public void shouldRenderMultiplyAggregationExpresssion() { + void shouldRenderMultiplyAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))) @@ -825,7 +826,7 @@ public void shouldRenderMultiplyAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderPow() { + void shouldRenderPow() { Document agg = project().and("value").pow(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -833,7 +834,7 @@ public void shouldRenderPow() { } @Test // DATAMONGO-1536 - public void shouldRenderPowAggregationExpresssion() { + void shouldRenderPowAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).pow(2)) @@ -844,7 +845,7 @@ public void shouldRenderPowAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSqrt() { + void shouldRenderSqrt() { Document agg = project().and("value").sqrt().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -852,7 +853,7 @@ public void shouldRenderSqrt() { } @Test // DATAMONGO-1536 - public void shouldRenderSqrtAggregationExpresssion() { + void shouldRenderSqrtAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).sqrt()) @@ -863,7 +864,7 @@ public void shouldRenderSqrtAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSubtract() { + void shouldRenderSubtract() { Document agg = project().and("numericField").minus(AggregationFunctionExpressions.SIZE.of(field("someArray"))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -873,7 +874,7 @@ public void shouldRenderSubtract() { } @Test // DATAMONGO-1536 - public void shouldRenderSubtractAggregationExpresssion() { + void shouldRenderSubtractAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf("numericField") @@ -885,7 +886,7 @@ public void shouldRenderSubtractAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderTrunc() { + void shouldRenderTrunc() { Document agg = project().and("value").trunc().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -893,7 +894,7 @@ public void shouldRenderTrunc() { } @Test // DATAMONGO-1536 - public void shouldRenderTruncAggregationExpresssion() { + void shouldRenderTruncAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).trunc()) @@ -904,7 +905,7 @@ public void shouldRenderTruncAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderConcat() { + void shouldRenderConcat() { Document agg = project().and("item").concat(" - ", field("description")).as("itemDescription") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -915,7 +916,7 @@ public void 
shouldRenderConcat() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatAggregationExpression() { + void shouldRenderConcatAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").concat(" - ").concatValueOf("description")) .as("itemDescription").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -926,7 +927,7 @@ public void shouldRenderConcatAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSubstr() { + void shouldRenderSubstr() { Document agg = project().and("quarter").substring(0, 2).as("yearSubstring").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -934,7 +935,7 @@ public void shouldRenderSubstr() { } @Test // DATAMONGO-1536 - public void shouldRenderSubstrAggregationExpression() { + void shouldRenderSubstrAggregationExpression() { Document agg = project().and(StringOperators.valueOf("quarter").substring(0, 2)).as("yearSubstring") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -943,7 +944,7 @@ public void shouldRenderSubstrAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderToLower() { + void shouldRenderToLower() { Document agg = project().and("item").toLower().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -951,7 +952,7 @@ public void shouldRenderToLower() { } @Test // DATAMONGO-1536 - public void shouldRenderToLowerAggregationExpression() { + void shouldRenderToLowerAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").toLower()).as("item") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -960,7 +961,7 @@ public void shouldRenderToLowerAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderToUpper() { + void shouldRenderToUpper() { Document agg = project().and("item").toUpper().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -968,7 +969,7 @@ public void shouldRenderToUpper() { } @Test // DATAMONGO-1536 - public void shouldRenderToUpperAggregationExpression() { + void shouldRenderToUpperAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").toUpper()).as("item") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -977,7 +978,7 @@ public void shouldRenderToUpperAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStrCaseCmp() { + void shouldRenderStrCaseCmp() { Document agg = project().and("quarter").strCaseCmp("13q4").as("comparisonResult") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -987,7 +988,7 @@ public void shouldRenderStrCaseCmp() { } @Test // DATAMONGO-1536 - public void shouldRenderStrCaseCmpAggregationExpression() { + void shouldRenderStrCaseCmpAggregationExpression() { Document agg = project().and(StringOperators.valueOf("quarter").strCaseCmp("13q4")).as("comparisonResult") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -997,7 +998,7 @@ public void shouldRenderStrCaseCmpAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderArrayElementAt() { + void shouldRenderArrayElementAt() { Document agg = project().and("favorites").arrayElementAt(0).as("first").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1005,7 +1006,7 @@ public void shouldRenderArrayElementAt() { } @Test // DATAMONGO-1536 - public void shouldRenderArrayElementAtAggregationExpression() { + void shouldRenderArrayElementAtAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").elementAt(0)).as("first") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1014,7 +1015,7 @@ public void shouldRenderArrayElementAtAggregationExpression() { } @Test // DATAMONGO-1536 - public void 
shouldRenderConcatArrays() { + void shouldRenderConcatArrays() { Document agg = project().and("instock").concatArrays("ordered").as("items").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1023,7 +1024,7 @@ public void shouldRenderConcatArrays() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatArraysAggregationExpression() { + void shouldRenderConcatArraysAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").concat("ordered")).as("items") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1033,7 +1034,7 @@ public void shouldRenderConcatArraysAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderIsArray() { + void shouldRenderIsArray() { Document agg = project().and("instock").isArray().as("isAnArray").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1041,7 +1042,7 @@ public void shouldRenderIsArray() { } @Test // DATAMONGO-1536 - public void shouldRenderIsArrayAggregationExpression() { + void shouldRenderIsArrayAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").isArray()).as("isAnArray") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1050,7 +1051,7 @@ public void shouldRenderIsArrayAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSizeAggregationExpression() { + void shouldRenderSizeAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").length()).as("arraySize") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1059,7 +1060,7 @@ public void shouldRenderSizeAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSliceAggregationExpression() { + void shouldRenderSliceAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().itemCount(3)).as("threeFavorites") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1068,7 +1069,7 @@ public void shouldRenderSliceAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSliceWithPositionAggregationExpression() { + void shouldRenderSliceWithPositionAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().offset(2).itemCount(3)) .as("threeFavorites").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1077,7 +1078,7 @@ public void shouldRenderSliceWithPositionAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLiteral() { + void shouldRenderLiteral() { Document agg = project().and("$1").asLiteral().as("literalOnly").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1085,7 +1086,7 @@ public void shouldRenderLiteral() { } @Test // DATAMONGO-1536 - public void shouldRenderLiteralAggregationExpression() { + void shouldRenderLiteralAggregationExpression() { Document agg = project().and(LiteralOperators.valueOf("$1").asLiteral()).as("literalOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1094,7 +1095,7 @@ public void shouldRenderLiteralAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderDayOfYearAggregationExpression() { + void shouldRenderDayOfYearAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfYear()).as("dayOfYear") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1103,7 +1104,7 @@ public void shouldRenderDayOfYearAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderDayOfYearAggregationExpressionWithTimezone() { + void shouldRenderDayOfYearAggregationExpressionWithTimezone() { Document agg = project() 
.and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfYear()).as("dayOfYear") @@ -1114,7 +1115,7 @@ public void shouldRenderDayOfYearAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderTimeZoneFromField() { + void shouldRenderTimeZoneFromField() { Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.ofField("tz")).dayOfYear()) .as("dayOfYear").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1124,7 +1125,7 @@ public void shouldRenderTimeZoneFromField() { } @Test // DATAMONGO-1834 - public void shouldRenderTimeZoneFromExpression() { + void shouldRenderTimeZoneFromExpression() { Document agg = project() .and(DateOperators.dateOf("date") @@ -1136,7 +1137,7 @@ public void shouldRenderTimeZoneFromExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderDayOfMonthAggregationExpression() { + void shouldRenderDayOfMonthAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfMonth()).as("day") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1145,7 +1146,7 @@ public void shouldRenderDayOfMonthAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { + void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfMonth()).as("day") @@ -1156,7 +1157,7 @@ public void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderDayOfWeekAggregationExpression() { + void shouldRenderDayOfWeekAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfWeek()).as("dayOfWeek") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1165,7 +1166,7 @@ public void shouldRenderDayOfWeekAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { + void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfWeek()).as("dayOfWeek") @@ -1176,7 +1177,7 @@ public void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderYearAggregationExpression() { + void shouldRenderYearAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").year()).as("year") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1185,7 +1186,7 @@ public void shouldRenderYearAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderYearAggregationExpressionWithTimezone() { + void shouldRenderYearAggregationExpressionWithTimezone() { Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).year()) .as("year").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1195,7 +1196,7 @@ public void shouldRenderYearAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderMonthAggregationExpression() { + void shouldRenderMonthAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").month()).as("month") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1204,7 +1205,7 @@ public void shouldRenderMonthAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderMonthAggregationExpressionWithTimezone() { + void shouldRenderMonthAggregationExpressionWithTimezone() { Document agg = 
project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).month()) .as("month").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1214,7 +1215,7 @@ public void shouldRenderMonthAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderWeekAggregationExpression() { + void shouldRenderWeekAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").week()).as("week") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1223,7 +1224,7 @@ public void shouldRenderWeekAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderWeekAggregationExpressionWithTimezone() { + void shouldRenderWeekAggregationExpressionWithTimezone() { Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).week()) .as("week").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1233,7 +1234,7 @@ public void shouldRenderWeekAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderHourAggregationExpression() { + void shouldRenderHourAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").hour()).as("hour") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1242,7 +1243,7 @@ public void shouldRenderHourAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderHourAggregationExpressionWithTimezone() { + void shouldRenderHourAggregationExpressionWithTimezone() { Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).hour()) .as("hour").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1252,7 +1253,7 @@ public void shouldRenderHourAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderMinuteAggregationExpression() { + void shouldRenderMinuteAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").minute()).as("minute") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1261,7 +1262,7 @@ public void shouldRenderMinuteAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderMinuteAggregationExpressionWithTimezone() { + void shouldRenderMinuteAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).minute()).as("minute") @@ -1272,7 +1273,7 @@ public void shouldRenderMinuteAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderSecondAggregationExpression() { + void shouldRenderSecondAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").second()).as("second") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1281,7 +1282,7 @@ public void shouldRenderSecondAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderSecondAggregationExpressionWithTimezone() { + void shouldRenderSecondAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).second()).as("second") @@ -1292,7 +1293,7 @@ public void shouldRenderSecondAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderMillisecondAggregationExpression() { + void shouldRenderMillisecondAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").millisecond()).as("msec") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1301,7 +1302,7 @@ public void shouldRenderMillisecondAggregationExpression() { } @Test // DATAMONGO-1834 - public void 
shouldRenderMillisecondAggregationExpressionWithTimezone() { + void shouldRenderMillisecondAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).millisecond()).as("msec") @@ -1312,7 +1313,7 @@ public void shouldRenderMillisecondAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderDateToString() { + void shouldRenderDateToString() { Document agg = project().and("date").dateAsFormattedString("%H:%M:%S:%L").as("time") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1322,7 +1323,7 @@ public void shouldRenderDateToString() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithoutFormatOption() { + void shouldRenderDateToStringWithoutFormatOption() { Document agg = project().and("date").dateAsFormattedString().as("time").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1330,7 +1331,7 @@ public void shouldRenderDateToStringWithoutFormatOption() { } @Test // DATAMONGO-1536 - public void shouldRenderDateToStringAggregationExpression() { + void shouldRenderDateToStringAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").toString("%H:%M:%S:%L")).as("time") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1340,7 +1341,7 @@ public void shouldRenderDateToStringAggregationExpression() { } @Test // DATAMONGO-1834, DATAMONGO-2047 - public void shouldRenderDateToStringAggregationExpressionWithTimezone() { + void shouldRenderDateToStringAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toString("%H:%M:%S:%L")) @@ -1358,7 +1359,7 @@ public void shouldRenderDateToStringAggregationExpressionWithTimezone() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithOnNull() { + void shouldRenderDateToStringWithOnNull() { Document agg = project() .and(DateOperators.dateOf("date").toStringWithDefaultFormat().onNullReturnValueOf("fallback-field")).as("time") @@ -1369,7 +1370,7 @@ public void shouldRenderDateToStringWithOnNull() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithOnNullExpression() { + void shouldRenderDateToStringWithOnNullExpression() { Document agg = project() .and(DateOperators.dateOf("date").toStringWithDefaultFormat() @@ -1381,7 +1382,7 @@ public void shouldRenderDateToStringWithOnNullExpression() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithOnNullAndTimezone() { + void shouldRenderDateToStringWithOnNullAndTimezone() { Document agg = project().and(DateOperators.dateOf("date").toStringWithDefaultFormat() .onNullReturnValueOf("fallback-field").withTimezone(Timezone.ofField("foo"))).as("time") @@ -1392,7 +1393,7 @@ public void shouldRenderDateToStringWithOnNullAndTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderSumAggregationExpression() { + void shouldRenderSumAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").sum()).as("quizTotal") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1401,7 +1402,7 @@ public void shouldRenderSumAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSumWithMultipleArgsAggregationExpression() { + void shouldRenderSumWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").sum().and("midterm")).as("examTotal") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1410,7 +1411,7 @@ public void 
shouldRenderSumWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderAvgAggregationExpression() { + void shouldRenderAvgAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").avg()).as("quizAvg") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1419,7 +1420,7 @@ public void shouldRenderAvgAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderAvgWithMultipleArgsAggregationExpression() { + void shouldRenderAvgWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").avg().and("midterm")).as("examAvg") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1428,7 +1429,7 @@ public void shouldRenderAvgWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMaxAggregationExpression() { + void shouldRenderMaxAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").max()).as("quizMax") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1437,7 +1438,7 @@ public void shouldRenderMaxAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMaxWithMultipleArgsAggregationExpression() { + void shouldRenderMaxWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").max().and("midterm")).as("examMax") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1446,7 +1447,7 @@ public void shouldRenderMaxWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMinAggregationExpression() { + void shouldRenderMinAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").min()).as("quizMin") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1455,7 +1456,7 @@ public void shouldRenderMinAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMinWithMultipleArgsAggregationExpression() { + void shouldRenderMinWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").min().and("midterm")).as("examMin") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1464,7 +1465,7 @@ public void shouldRenderMinWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStdDevPopAggregationExpression() { + void shouldRenderStdDevPopAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevPop()).as("stdDev") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1473,7 +1474,7 @@ public void shouldRenderStdDevPopAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStdDevSampAggregationExpression() { + void shouldRenderStdDevSampAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevSamp()).as("stdDev") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1482,7 +1483,7 @@ public void shouldRenderStdDevSampAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderCmpAggregationExpression() { + void shouldRenderCmpAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").compareToValue(250)).as("cmp250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1491,7 +1492,7 @@ public void shouldRenderCmpAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderEqAggregationExpression() { + void shouldRenderEqAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(250)).as("eq250") 
.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1500,7 +1501,7 @@ public void shouldRenderEqAggregationExpression() { } @Test // DATAMONGO-2513 - public void shouldRenderEqAggregationExpressionWithListComparison() { + void shouldRenderEqAggregationExpressionWithListComparison() { Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(Arrays.asList(250))).as("eq250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1509,7 +1510,7 @@ public void shouldRenderEqAggregationExpressionWithListComparison() { } @Test // DATAMONGO-1536 - public void shouldRenderGtAggregationExpression() { + void shouldRenderGtAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanValue(250)).as("gt250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1518,7 +1519,7 @@ public void shouldRenderGtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderGteAggregationExpression() { + void shouldRenderGteAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanEqualToValue(250)).as("gte250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1527,7 +1528,7 @@ public void shouldRenderGteAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLtAggregationExpression() { + void shouldRenderLtAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanValue(250)).as("lt250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1536,7 +1537,7 @@ public void shouldRenderLtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLteAggregationExpression() { + void shouldRenderLteAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanEqualToValue(250)).as("lte250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1545,7 +1546,7 @@ public void shouldRenderLteAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderNeAggregationExpression() { + void shouldRenderNeAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").notEqualToValue(250)).as("ne250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1554,7 +1555,7 @@ public void shouldRenderNeAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLogicAndAggregationExpression() { + void shouldRenderLogicAndAggregationExpression() { Document agg = project() .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(100)) @@ -1566,7 +1567,7 @@ public void shouldRenderLogicAndAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLogicOrAggregationExpression() { + void shouldRenderLogicOrAggregationExpression() { Document agg = project() .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(250)) @@ -1578,7 +1579,7 @@ public void shouldRenderLogicOrAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderNotAggregationExpression() { + void shouldRenderNotAggregationExpression() { Document agg = project().and(BooleanOperators.not(ComparisonOperators.valueOf("qty").greaterThanValue(250))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1587,7 +1588,7 @@ public void shouldRenderNotAggregationExpression() { } @Test // DATAMONGO-1540 - public void shouldRenderMapAggregationExpression() { + void shouldRenderMapAggregationExpression() { Document agg = Aggregation.project() .and(VariableOperators.mapItemsOf("quizzes").as("grade") @@ -1599,7 +1600,7 @@ public void 
shouldRenderMapAggregationExpression() { } @Test // DATAMONGO-1540 - public void shouldRenderMapAggregationExpressionOnExpression() { + void shouldRenderMapAggregationExpressionOnExpression() { Document agg = Aggregation.project() .and(VariableOperators.mapItemsOf(AggregationFunctionExpressions.SIZE.of("foo")).as("grade") @@ -1611,7 +1612,7 @@ public void shouldRenderMapAggregationExpressionOnExpression() { } @Test // DATAMONGO-861, DATAMONGO-1542 - public void shouldRenderIfNullConditionAggregationExpression() { + void shouldRenderIfNullConditionAggregationExpression() { Document agg = project().and( ConditionalOperators.ifNull(ArrayOperators.arrayOf("array").elementAt(1)).then("a more sophisticated value")) @@ -1622,7 +1623,7 @@ public void shouldRenderIfNullConditionAggregationExpression() { } @Test // DATAMONGO-1542 - public void shouldRenderIfNullValueAggregationExpression() { + void shouldRenderIfNullValueAggregationExpression() { Document agg = project() .and(ConditionalOperators.ifNull("field").then(ArrayOperators.arrayOf("array").elementAt(1))).as("result") @@ -1633,7 +1634,7 @@ public void shouldRenderIfNullValueAggregationExpression() { } @Test // DATAMONGO-861, DATAMONGO-1542 - public void fieldReplacementIfNullShouldRenderCorrectly() { + void fieldReplacementIfNullShouldRenderCorrectly() { Document agg = project().and(ConditionalOperators.ifNull("optional").thenValueOf("$never-null")).as("result") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1643,7 +1644,7 @@ public void fieldReplacementIfNullShouldRenderCorrectly() { } @Test // DATAMONGO-1538 - public void shouldRenderLetExpressionCorrectly() { + void shouldRenderLetExpressionCorrectly() { Document agg = Aggregation.project() .and(VariableOperators @@ -1665,7 +1666,7 @@ public void shouldRenderLetExpressionCorrectly() { } @Test // DATAMONGO-1538 - public void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() { + void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() { ExpressionVariable var1 = newVariable("total") .forExpression(AggregationFunctionExpressions.ADD.of(Fields.field("price"), Fields.field("tax"))); @@ -1688,7 +1689,7 @@ public void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfBytesCorrectly() { + void shouldRenderIndexOfBytesCorrectly() { Document agg = project().and(StringOperators.valueOf("item").indexOf("foo")).as("byteLocation") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1698,7 +1699,7 @@ public void shouldRenderIndexOfBytesCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfBytesWithRangeCorrectly() { + void shouldRenderIndexOfBytesWithRangeCorrectly() { Document agg = project() .and(StringOperators.valueOf("item").indexOf("foo") @@ -1710,7 +1711,7 @@ public void shouldRenderIndexOfBytesWithRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfCPCorrectly() { + void shouldRenderIndexOfCPCorrectly() { Document agg = project().and(StringOperators.valueOf("item").indexOfCP("foo")).as("cpLocation") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1719,7 +1720,7 @@ public void shouldRenderIndexOfCPCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfCPWithRangeCorrectly() { + void shouldRenderIndexOfCPWithRangeCorrectly() { Document agg = project() .and(StringOperators.valueOf("item").indexOfCP("foo") @@ -1731,7 +1732,7 @@ public void shouldRenderIndexOfCPWithRangeCorrectly() { } @Test // DATAMONGO-1548 - public void 
shouldRenderSplitCorrectly() { + void shouldRenderSplitCorrectly() { Document agg = project().and(StringOperators.valueOf("city").split(", ")).as("city_state") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1740,7 +1741,7 @@ public void shouldRenderSplitCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderStrLenBytesCorrectly() { + void shouldRenderStrLenBytesCorrectly() { Document agg = project().and(StringOperators.valueOf("name").length()).as("length") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1749,7 +1750,7 @@ public void shouldRenderStrLenBytesCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderStrLenCPCorrectly() { + void shouldRenderStrLenCPCorrectly() { Document agg = project().and(StringOperators.valueOf("name").lengthCP()).as("length") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1758,7 +1759,7 @@ public void shouldRenderStrLenCPCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderSubstrCPCorrectly() { + void shouldRenderSubstrCPCorrectly() { Document agg = project().and(StringOperators.valueOf("quarter").substringCP(0, 2)).as("yearSubstring") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1766,27 +1767,27 @@ public void shouldRenderSubstrCPCorrectly() { assertThat(agg) .isEqualTo(Document.parse("{ $project : { yearSubstring: { $substrCP: [ \"$quarter\", 0, 2 ] } } }")); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindCorrectly() { + + @Test // GH-3725 + void shouldRenderRegexFindCorrectly() { Document agg = project().and(StringOperators.valueOf("field1").regexFind("e")).as("regex") .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFind: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllCorrectly() { + + @Test // GH-3725 + void shouldRenderRegexFindAllCorrectly() { Document agg = project().and(StringOperators.valueOf("field1").regexFindAll("e")).as("regex") .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFindAll: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchCorrectly() { + + @Test // GH-3725 + void shouldRenderRegexMatchCorrectly() { Document agg = project().and(StringOperators.valueOf("field1").regexMatch("e")).as("regex") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1795,7 +1796,7 @@ public void shouldRenderRegexMatchCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfArrayCorrectly() { + void shouldRenderIndexOfArrayCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("items").indexOf(2)).as("index") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1804,7 +1805,7 @@ public void shouldRenderIndexOfArrayCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderRangeCorrectly() { + void shouldRenderRangeCorrectly() { Document agg = project().and(ArrayOperators.RangeOperator.rangeStartingAt(0L).to("distance").withStepSize(25L)) .as("rest_stops").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1815,7 +1816,7 @@ public void shouldRenderRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReverseArrayCorrectly() { + void shouldRenderReverseArrayCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("favorites").reverse()).as("reverseFavorites") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1824,7 +1825,7 @@ public void shouldRenderReverseArrayCorrectly() { } @Test // 
DATAMONGO-1548 - public void shouldRenderReduceWithSimpleObjectCorrectly() { + void shouldRenderReduceWithSimpleObjectCorrectly() { Document agg = project() .and(ArrayOperators.arrayOf("probabilityArr") @@ -1836,7 +1837,7 @@ public void shouldRenderReduceWithSimpleObjectCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReduceWithComplexObjectCorrectly() { + void shouldRenderReduceWithComplexObjectCorrectly() { PropertyExpression sum = PropertyExpression.property("sum").definedAs( ArithmeticOperators.valueOf(Variable.VALUE.referringTo("sum").getName()).add(Variable.THIS.getName())); @@ -1853,7 +1854,7 @@ public void shouldRenderReduceWithComplexObjectCorrectly() { } @Test // DATAMONGO-1843 - public void shouldRenderReduceWithInputAndInExpressionsCorrectly() { + void shouldRenderReduceWithInputAndInExpressionsCorrectly() { Document expected = Document.parse( "{ \"$project\" : { \"results\" : { \"$reduce\" : { \"input\" : { \"$slice\" : [\"$array\", 5] }, \"initialValue\" : \"\", \"in\" : { \"$concat\" : [\"$$value\", \"/\", \"$$this\"] } } } } }"); @@ -1874,7 +1875,7 @@ public void shouldRenderReduceWithInputAndInExpressionsCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderZipCorrectly() { + void shouldRenderZipCorrectly() { AggregationExpression elemAt0 = ArrayOperators.arrayOf("matrix").elementAt(0); AggregationExpression elemAt1 = ArrayOperators.arrayOf("matrix").elementAt(1); @@ -1889,7 +1890,7 @@ public void shouldRenderZipCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderInCorrectly() { + void shouldRenderInCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("in_stock").containsValue("bananas")).as("has_bananas") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1899,7 +1900,7 @@ public void shouldRenderInCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIsoDayOfWeekCorrectly() { + void shouldRenderIsoDayOfWeekCorrectly() { Document agg = project().and(DateOperators.dateOf("birthday").isoDayOfWeek()).as("dayOfWeek") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1908,7 +1909,7 @@ public void shouldRenderIsoDayOfWeekCorrectly() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { + void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { Document agg = project() .and(DateOperators.dateOf("birthday").withTimezone(Timezone.valueOf("America/Chicago")).isoDayOfWeek()) @@ -1919,7 +1920,7 @@ public void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIsoWeekCorrectly() { + void shouldRenderIsoWeekCorrectly() { Document agg = project().and(DateOperators.dateOf("date").isoWeek()).as("weekNumber") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1928,7 +1929,7 @@ public void shouldRenderIsoWeekCorrectly() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoWeekWithTimezoneCorrectly() { + void shouldRenderIsoWeekWithTimezoneCorrectly() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeek()).as("weekNumber") @@ -1939,7 +1940,7 @@ public void shouldRenderIsoWeekWithTimezoneCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIsoWeekYearCorrectly() { + void shouldRenderIsoWeekYearCorrectly() { Document agg = project().and(DateOperators.dateOf("date").isoWeekYear()).as("yearNumber") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1948,7 +1949,7 @@ public void shouldRenderIsoWeekYearCorrectly() { } @Test // DATAMONGO-1834 - public void 
shouldRenderIsoWeekYearWithTimezoneCorrectly() { + void shouldRenderIsoWeekYearWithTimezoneCorrectly() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear()) @@ -1959,7 +1960,7 @@ public void shouldRenderIsoWeekYearWithTimezoneCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderSwitchCorrectly() { + void shouldRenderSwitchCorrectly() { String expected = "$switch:\n" + // "{\n" + // @@ -2001,7 +2002,7 @@ public void shouldRenderSwitchCorrectly() { } @Test // DATAMONGO-1548 - public void shouldTypeCorrectly() { + void shouldTypeCorrectly() { Document agg = project().and(DataTypeOperators.Type.typeOf("a")).as("a").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2009,7 +2010,7 @@ public void shouldTypeCorrectly() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromPartsWithJustTheYear() { + void shouldRenderDateFromPartsWithJustTheYear() { Document agg = project().and(DateOperators.dateFromParts().year(2018)).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2018,7 +2019,7 @@ public void shouldRenderDateFromPartsWithJustTheYear() { } @Test // DATAMONGO-1834, DATAMONGO-2671 - public void shouldRenderDateFromParts() { + void shouldRenderDateFromParts() { Document agg = project() .and(DateOperators.dateFromParts().year(2018).month(3).day(23).hour(14).minute(25).second(10).millisecond(2)) @@ -2029,7 +2030,7 @@ public void shouldRenderDateFromParts() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromPartsWithTimezone() { + void shouldRenderDateFromPartsWithTimezone() { Document agg = project() .and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).year(2018)).as("newDate") @@ -2040,7 +2041,7 @@ public void shouldRenderDateFromPartsWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoDateFromPartsWithJustTheYear() { + void shouldRenderIsoDateFromPartsWithJustTheYear() { Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018)).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2049,7 +2050,7 @@ public void shouldRenderIsoDateFromPartsWithJustTheYear() { } @Test // DATAMONGO-1834, DATAMONGO-2671 - public void shouldRenderIsoDateFromParts() { + void shouldRenderIsoDateFromParts() { Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018).isoWeek(12).isoDayOfWeek(5).hour(14) .minute(30).second(42).millisecond(2)).as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2059,7 +2060,7 @@ public void shouldRenderIsoDateFromParts() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoDateFromPartsWithTimezone() { + void shouldRenderIsoDateFromPartsWithTimezone() { Document agg = project() .and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear(2018)) @@ -2070,7 +2071,7 @@ public void shouldRenderIsoDateFromPartsWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderDateToParts() { + void shouldRenderDateToParts() { Document agg = project().and(DateOperators.dateOf("date").toParts()).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2079,7 +2080,7 @@ public void shouldRenderDateToParts() { } @Test // DATAMONGO-1834 - public void shouldRenderDateToIsoParts() { + void shouldRenderDateToIsoParts() { Document agg = project().and(DateOperators.dateOf("date").toParts().iso8601()).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2089,7 +2090,7 @@ public void shouldRenderDateToIsoParts() { } @Test // DATAMONGO-1834 - public 
void shouldRenderDateToPartsWithTimezone() { + void shouldRenderDateToPartsWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toParts()).as("newDate") @@ -2100,7 +2101,7 @@ public void shouldRenderDateToPartsWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromString() { + void shouldRenderDateFromString() { Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787")).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2110,7 +2111,7 @@ public void shouldRenderDateFromString() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromStringWithFieldReference() { + void shouldRenderDateFromStringWithFieldReference() { Document agg = project().and(DateOperators.dateOf("date").fromString()).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2120,7 +2121,7 @@ public void shouldRenderDateFromStringWithFieldReference() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromStringWithTimezone() { + void shouldRenderDateFromStringWithTimezone() { Document agg = project() .and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withTimezone(Timezone.valueOf("America/Chicago"))) @@ -2131,7 +2132,7 @@ public void shouldRenderDateFromStringWithTimezone() { } @Test // DATAMONGO-2047 - public void shouldRenderDateFromStringWithFormat() { + void shouldRenderDateFromStringWithFormat() { Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withFormat("dd/mm/yyyy")) .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2141,7 +2142,7 @@ public void shouldRenderDateFromStringWithFormat() { } @Test // DATAMONGO-2200 - public void typeProjectionShouldIncludeTopLevelFieldsOfType() { + void typeProjectionShouldIncludeTopLevelFieldsOfType() { ProjectionOperation operation = Aggregation.project(Book.class); @@ -2155,7 +2156,7 @@ public void typeProjectionShouldIncludeTopLevelFieldsOfType() { } @Test // DATAMONGO-2200 - public void typeProjectionShouldMapFieldNames() { + void typeProjectionShouldMapFieldNames() { MongoMappingContext mappingContext = new MongoMappingContext(); MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); @@ -2171,7 +2172,7 @@ public void typeProjectionShouldMapFieldNames() { } @Test // DATAMONGO-2200 - public void typeProjectionShouldIncludeInterfaceProjectionValues() { + void typeProjectionShouldIncludeInterfaceProjectionValues() { ProjectionOperation operation = Aggregation.project(ProjectionInterface.class); @@ -2184,7 +2185,7 @@ public void typeProjectionShouldIncludeInterfaceProjectionValues() { } @Test // DATAMONGO-2200 - public void typeProjectionShouldBeEmptyIfNoPropertiesFound() { + void typeProjectionShouldBeEmptyIfNoPropertiesFound() { ProjectionOperation operation = Aggregation.project(EmptyType.class); @@ -2195,7 +2196,7 @@ public void typeProjectionShouldBeEmptyIfNoPropertiesFound() { } @Test // DATAMONGO-2312 - public void simpleFieldReferenceAsArray() { + void simpleFieldReferenceAsArray() { org.bson.Document doc = Aggregation.newAggregation(project("x", "y", "someField").asArray("myArray")) .toDocument("coll", Aggregation.DEFAULT_CONTEXT); @@ -2205,7 +2206,7 @@ public void simpleFieldReferenceAsArray() { } @Test // DATAMONGO-2312 - public void mappedFieldReferenceAsArray() { + void mappedFieldReferenceAsArray() { MongoMappingContext mappingContext = new MongoMappingContext(); @@ -2219,7 +2220,7 @@ public void mappedFieldReferenceAsArray() { 
} @Test // DATAMONGO-2312 - public void arrayWithNullValue() { + void arrayWithNullValue() { Document doc = project() // .andArrayOf(Fields.field("field-1"), null, "value").as("myArray") // @@ -2229,7 +2230,7 @@ public void arrayWithNullValue() { } @Test // DATAMONGO-2312 - public void nestedArrayField() { + void nestedArrayField() { Document doc = project("_id", "value") // .andArrayOf(Fields.field("field-1"), "plain - string", ArithmeticOperators.valueOf("field-1").sum().and(10)) @@ -2241,7 +2242,7 @@ public void nestedArrayField() { } @Test // DATAMONGO-2312 - public void nestedMappedFieldReferenceInArrayField() { + void nestedMappedFieldReferenceInArrayField() { MongoMappingContext mappingContext = new MongoMappingContext(); @@ -2289,7 +2290,7 @@ interface ProjectionInterface { String getTitle(); } - static class EmptyType { + private static class EmptyType { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 41b0323636..e92ea38336 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -32,6 +32,7 @@ * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Divya Srivastava */ public class SpelExpressionTransformerUnitTests { @@ -800,68 +801,68 @@ void shouldRenderRtrimWithCharsFromFieldReference() { assertThat(transform("rtrim(field1, field2)")) .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindWithoutOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindWithoutOptions() { + assertThat(transform("regexFind(field1,'e')")) - .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindWithOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + assertThat(transform("regexFind(field1,'e','i')")) - .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindWithOptionsFromFieldReference() { - + + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsFromFieldReference() { + assertThat(transform("regexFind(field1,'e',field2)")) - .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindAllWithoutOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindAllWithoutOptions() { + assertThat(transform("regexFindAll(field1,'e')")) - .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); } - - @Test // DATAMONGO-3725 - public void 
shouldRenderRegexFindAllWithOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { + assertThat(transform("regexFindAll(field1,'e','i')")) - .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindAllWithOptionsFromFieldReference() { - + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsFromFieldReference() { + assertThat(transform("regexFindAll(field1,'e',field2)")) - .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); } - @Test // DATAMONGO-3725 - public void shouldRenderRegexMatchWithoutOptions() { - + @Test // GH-3725 + void shouldRenderRegexMatchWithoutOptions() { + assertThat(transform("regexMatch(field1,'e')")) - .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexMatchWithOptions() { - + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { + assertThat(transform("regexMatch(field1,'e','i')")) - .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexMatchWithOptionsFromFieldReference() { - + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsFromFieldReference() { + assertThat(transform("regexMatch(field1,'e',field2)")) - .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java index cdd0b38dbc..d8ba5129e0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java @@ -15,7 +15,9 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.regex.Pattern; import org.bson.Document; import org.junit.jupiter.api.Test; @@ -25,230 +27,258 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava * @currentRead Royal Assassin - Robin Hobb */ -public class StringOperatorsUnitTests { +class StringOperatorsUnitTests { - static final String EXPRESSION_STRING = "{ \"$fitz\" : \"chivalry\" }"; - static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); - static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + private static final String EXPRESSION_STRING = "{ \"$fitz\" : \"chivalry\" }"; + private 
static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + private static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; @Test // DATAMONGO-2049 - public void shouldRenderTrim() { + void shouldRenderTrim() { assertThat(StringOperators.valueOf("shrewd").trim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimForExpression() { + void shouldRenderTrimForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).trim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $trim: { \"input\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimWithChars() { + void shouldRenderTrimWithChars() { assertThat(StringOperators.valueOf("shrewd").trim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimWithCharsExpression() { + void shouldRenderTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").trim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimLeft() { + void shouldRenderTrimLeft() { assertThat(StringOperators.valueOf("shrewd").trim().left().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimLeftWithChars() { + void shouldRenderTrimLeftWithChars() { assertThat(StringOperators.valueOf("shrewd").trim("sh").left().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimRight() { + void shouldRenderTrimRight() { assertThat(StringOperators.valueOf("shrewd").trim().right().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimRightWithChars() { + void shouldRenderTrimRightWithChars() { assertThat(StringOperators.valueOf("shrewd").trim("sh").right().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrim() { + void shouldRenderLTrim() { assertThat(StringOperators.valueOf("shrewd").ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrimForExpression() { + void shouldRenderLTrimForExpression() { 
assertThat(StringOperators.valueOf(EXPRESSION).ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $ltrim: { \"input\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrimWithChars() { + void shouldRenderLTrimWithChars() { assertThat(StringOperators.valueOf("shrewd").ltrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrimWithCharsExpression() { + void shouldRenderLTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").ltrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrim() { + void shouldRenderRTrim() { assertThat(StringOperators.valueOf("shrewd").rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrimForExpression() { + void shouldRenderRTrimForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $rtrim: { \"input\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrimWithChars() { + void shouldRenderRTrimWithChars() { assertThat(StringOperators.valueOf("shrewd").rtrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrimWithCharsExpression() { + void shouldRenderRTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").rtrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAll() { + + @Test // GH-3725 + void shouldRenderRegexFindAll() { assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllForExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindAllForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); } - - @Test // DATAMONGO - 3725 - public void 
shouldRenderRegexFindAllForRegexExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindAllForRegexExpression() { assertThat(StringOperators.valueOf("shrewd").regexFindAll(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithPattern() { + + assertThat(StringOperators.valueOf("shrewd") + .regexFindAll( + Pattern.compile("foo", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL | Pattern.COMMENTS)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"foo\" , \"options\" : \"imsx\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllWithOptions() { + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllWithOptionsExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsExpression() { assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatch() { + + @Test // GH-3725 + void shouldRenderRegexMatch() { assertThat(StringOperators.valueOf("shrewd").regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchForExpression() { + + @Test // GH-3725 + void shouldRenderRegexMatchForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + .isEqualTo("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchForRegexExpression() { + + @Test // GH-3725 + void shouldRenderRegexMatchForRegexExpression() { assertThat(StringOperators.valueOf("shrewd").regexMatch(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(Pattern.compile("foo", Pattern.CASE_INSENSITIVE)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: 
{ \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"i\"} } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchWithOptions() { + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { assertThat(StringOperators.valueOf("shrewd").regexMatch("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchWithOptionsExpression() { + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsExpression() { assertThat(StringOperators.valueOf("shrewd").regexMatch("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); } - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFind() { + @Test // GH-3725 + void shouldRenderRegexFind() { assertThat(StringOperators.valueOf("shrewd").regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindForExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + .isEqualTo("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindForRegexExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindForRegexExpression() { assertThat(StringOperators.valueOf("shrewd").regexFind(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindWithOptions() { - assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + @Test // GH-3725 + void shouldRenderRegexFindForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(Pattern.compile("foo", Pattern.MULTILINE)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"m\"} } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindWithOptionsExpression() { - assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + + 
assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); } + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsExpression() { + assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index f96719adde..75ed415096 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -88,7 +88,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `abs`, `add` (+++*+++ via `plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators -| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` +| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators | `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne` From 62eb719b1e9adb050c33be254cbe9bf7a527415e Mon Sep 17 00:00:00 2001 From: James McNee Date: Thu, 26 Aug 2021 21:25:36 +0100 Subject: [PATCH 071/885] Add support for `$sampleRate` criteria. Closes #3726 Original pull request: #3765. --- .../data/mongodb/core/query/Criteria.java | 16 ++++++++++++++++ .../mongodb/core/query/CriteriaUnitTests.java | 16 ++++++++++++++++ .../asciidoc/reference/mongo-repositories.adoc | 4 ++++ 3 files changed, 36 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index f9a354c38f..3ec4caf3c7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -64,6 +64,7 @@ * @author Andreas Zink * @author Ziemowit Stolarczyk * @author Clément Petit + * @author James McNee */ public class Criteria implements CriteriaDefinition { @@ -390,6 +391,21 @@ public Criteria exists(boolean value) { return this; } + /** + * Creates a criterion using the {@literal $sampleRate} operator. + * + * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. + * @return this. + * @see MongoDB Query operator: $sampleRate + */ + public Criteria sampleRate(double sampleRate) { + Assert.isTrue(sampleRate >= 0, "The sample rate must be greater than zero!"); + Assert.isTrue(sampleRate <= 1, "The sample rate must not be greater than one!"); + + criteria.put("$sampleRate", sampleRate); + return this; + } + /** * Creates a criterion using the {@literal $type} operator. 
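A minimal usage sketch of the new criterion (illustrative only: the wrapping class and method names are invented, everything else mirrors the API added above). `new Criteria().sampleRate(...)` renders a top-level `$sampleRate` document and, being a `CriteriaDefinition`, can also back a `$match` stage:

import org.bson.Document;
import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.MatchOperation;
import org.springframework.data.mongodb.core.query.Criteria;

class SampleRateUsageSketch {

	// Renders { "$sampleRate" : 0.33 }, i.e. keep roughly a third of the input documents.
	Document renderSampleRate() {
		return new Criteria().sampleRate(0.33).getCriteriaObject();
	}

	// The same criterion used to build an aggregation $match stage.
	MatchOperation sampleRateMatchStage() {
		return Aggregation.match(new Criteria().sampleRate(0.33));
	}
}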
* diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java index 9edf3c43fd..e24fc34bef 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java @@ -156,6 +156,22 @@ public void shouldNegateFollowingSimpleExpression() { assertThat(co).isEqualTo(Document.parse("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}")); } + @Test // GH-3726 + public void shouldBuildCorrectSampleRateOperation() { + Criteria c = new Criteria().sampleRate(0.4); + assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"$sampleRate\" : 0.4 }")); + } + + @Test // GH-3726 + public void shouldThrowExceptionWhenSampleRateIsNegative() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(-1)); + } + + @Test // GH-3726 + public void shouldThrowExceptionWhenSampleRateIsGreatedThanOne() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(1.01)); + } + @Test // DATAMONGO-1068 public void getCriteriaObjectShouldReturnEmptyDocumentWhenNoCriteriaSpecified() { diff --git a/src/main/asciidoc/reference/mongo-repositories.adoc b/src/main/asciidoc/reference/mongo-repositories.adoc index 328a547b5a..b847174c67 100644 --- a/src/main/asciidoc/reference/mongo-repositories.adoc +++ b/src/main/asciidoc/reference/mongo-repositories.adoc @@ -281,6 +281,10 @@ lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range` | `Exists` | `findByLocationExists(boolean exists)` | `{"location" : {"$exists" : exists }}` + +| `SampleRate` +| `sampleRate(double sampleRate)` +| `{"$sampleRate" : sampleRate }` |=== NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters. From f662d7ca0d240a9f719ffec78243fc7661c544a1 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 27 Aug 2021 09:34:40 +0200 Subject: [PATCH 072/885] Polishing. Tweak Javadoc. Add since tag, reformat code. Simplify tests. Move documentation bits into the right place. See #3726. Original pull request: #3765. --- .../data/mongodb/core/query/Criteria.java | 8 ++++-- .../mongodb/core/query/CriteriaUnitTests.java | 27 ++++++++++--------- .../reference/mongo-repositories.adoc | 4 --- src/main/asciidoc/reference/mongodb.adoc | 1 + 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index 3ec4caf3c7..df167330a1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -394,11 +394,15 @@ public Criteria exists(boolean value) { /** * Creates a criterion using the {@literal $sampleRate} operator. * - * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. + * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. Must be + * between {@code 0} and {@code 1}. * @return this. 
- * @see MongoDB Query operator: $sampleRate + * @see MongoDB Query operator: + * $sampleRate + * @since 3.3 */ public Criteria sampleRate(double sampleRate) { + Assert.isTrue(sampleRate >= 0, "The sample rate must be greater than zero!"); Assert.isTrue(sampleRate <= 1, "The sample rate must not be greater than one!"); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java index e24fc34bef..96253e4ac0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java @@ -40,19 +40,20 @@ * @author Ziemowit Stolarczyk * @author Clément Petit * @author Mark Paluch + * @author James McNee */ public class CriteriaUnitTests { @Test public void testSimpleCriteria() { Criteria c = new Criteria("name").is("Bubba"); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"name\" : \"Bubba\"}")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\"}"); } @Test public void testNotEqualCriteria() { Criteria c = new Criteria("name").ne("Bubba"); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"name\" : { \"$ne\" : \"Bubba\"}}")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : { \"$ne\" : \"Bubba\"}}"); } @Test @@ -67,7 +68,7 @@ public void buildsIsNullCriteriaCorrectly() { @Test public void testChainedCriteria() { Criteria c = new Criteria("name").is("Bubba").and("age").lt(21); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}"); } @Test(expected = InvalidMongoDbApiUsageException.class) @@ -153,13 +154,13 @@ public void shouldNegateFollowingSimpleExpression() { Document co = c.getCriteriaObject(); assertThat(co).isNotNull(); - assertThat(co).isEqualTo(Document.parse("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}")); + assertThat(co).isEqualTo("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"); } @Test // GH-3726 public void shouldBuildCorrectSampleRateOperation() { Criteria c = new Criteria().sampleRate(0.4); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"$sampleRate\" : 0.4 }")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"$sampleRate\" : 0.4 }"); } @Test // GH-3726 @@ -302,7 +303,7 @@ public void shouldAppendBitsAllClearWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().allClear(0b101); assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllClear\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllClear\" : 5} }"); } @Test // DATAMONGO-1808 @@ -311,7 +312,7 @@ public void shouldAppendBitsAllClearWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().allClear(Arrays.asList(0, 2)); assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllClear\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllClear\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-1808 @@ -320,7 +321,7 @@ public void shouldAppendBitsAllSetWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().allSet(0b101); 
assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllSet\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllSet\" : 5} }"); } @Test // DATAMONGO-1808 @@ -329,7 +330,7 @@ public void shouldAppendBitsAllSetWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().allSet(Arrays.asList(0, 2)); assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllSet\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllSet\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-1808 @@ -338,7 +339,7 @@ public void shouldAppendBitsAnyClearWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().anyClear(0b101); assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnyClear\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : 5} }"); } @Test // DATAMONGO-1808 @@ -347,7 +348,7 @@ public void shouldAppendBitsAnyClearWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anyClear(Arrays.asList(0, 2)); assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnyClear\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-1808 @@ -356,7 +357,7 @@ public void shouldAppendBitsAnySetWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().anySet(0b101); assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnySet\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnySet\" : 5} }"); } @Test // DATAMONGO-1808 @@ -365,7 +366,7 @@ public void shouldAppendBitsAnySetWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anySet(Arrays.asList(0, 2)); assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnySet\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnySet\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-2002 diff --git a/src/main/asciidoc/reference/mongo-repositories.adoc b/src/main/asciidoc/reference/mongo-repositories.adoc index b847174c67..328a547b5a 100644 --- a/src/main/asciidoc/reference/mongo-repositories.adoc +++ b/src/main/asciidoc/reference/mongo-repositories.adoc @@ -281,10 +281,6 @@ lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range` | `Exists` | `findByLocationExists(boolean exists)` | `{"location" : {"$exists" : exists }}` - -| `SampleRate` -| `sampleRate(double sampleRate)` -| `{"$sampleRate" : sampleRate }` |=== NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters. diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index f214edba4c..7bf034f461 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1219,6 +1219,7 @@ The `Criteria` class provides the following methods, all of which correspond to * `Criteria` *orOperator* `(Criteria... 
criteria)` Creates an or query using the `$or` operator for all of the provided criteria * `Criteria` *orOperator* `(Collection criteria)` Creates an or query using the `$or` operator for all of the provided criteria * `Criteria` *regex* `(String re)` Creates a criterion using a `$regex` +* `Criteria` *sampleRate* `(double sampleRate)` Creates a criterion using the `$sampleRate` operator * `Criteria` *size* `(int s)` Creates a criterion using the `$size` operator * `Criteria` *type* `(int t)` Creates a criterion using the `$type` operator * `Criteria` *matchingDocumentStructure* `(MongoJsonSchema schema)` Creates a criterion using the `$jsonSchema` operator for <>. `$jsonSchema` can only be applied on the top level of a query and not property specific. Use the `properties` attribute of the schema to match against nested fields. From bf86f39b2d5bc2d3c197fbff90551b85b440474a Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 24 Aug 2021 07:31:25 +0200 Subject: [PATCH 073/885] Fix id field target type conversion for document references. This commit fixes an issue where a defined custom target type conversion for the id field was not properly considered when writing a document reference. Previously an eg. String was not being converted into an ObjectId correctly causing lookup queries to return empty results. Converting the id property value on write solves the issue. Includes a minor polish in the mapping centralizing pointer creation within the DocumentPointerFactory. Closes: #3782 Original pull request: #3785. --- .../core/convert/DocumentPointerFactory.java | 11 ++- .../core/convert/MappingMongoConverter.java | 23 ++--- .../MongoTemplateDocumentReferenceTests.java | 98 ++++++++++++++++++- 3 files changed, 114 insertions(+), 18 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java index 09d69e4b27..b30aa957de 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java @@ -83,7 +83,16 @@ DocumentPointer computePointer( .getRequiredPersistentEntity(property.getAssociationTargetType()); if (usesDefaultLookup(property)) { - return () -> persistentEntity.getIdentifierAccessor(value).getIdentifier(); + + MongoPersistentProperty idProperty = persistentEntity.getIdProperty(); + Object idValue = persistentEntity.getIdentifierAccessor(value).getIdentifier(); + + if (idProperty.hasExplicitWriteTarget() + && conversionService.canConvert(idValue.getClass(), idProperty.getFieldType())) { + return () -> conversionService.convert(idValue, idProperty.getFieldType()); + } + + return () -> idValue; } MongoPersistentEntity valueEntity = mappingContext.getPersistentEntity(value.getClass()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 48505559c0..a60c853c33 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -869,15 +869,12 @@ protected List createCollection(Collection 
collection, MongoPersisten if (!property.isDbReference()) { if (property.isAssociation()) { - return writeCollectionInternal(collection.stream().map(it -> { - if (conversionService.canConvert(it.getClass(), DocumentPointer.class)) { - return conversionService.convert(it, DocumentPointer.class).getPointer(); - } else { - // just take the id as a reference - return mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(it) - .getIdentifier(); - } - }).collect(Collectors.toList()), ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>()); + + List targetCollection = collection.stream().map(it -> { + return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()).getPointer(); + }).collect(Collectors.toList()); + + return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>()); } if (property.hasExplicitWriteTarget()) { @@ -930,13 +927,7 @@ protected Bson createMap(Map map, MongoPersistentProperty proper if (property.isDbReference()) { document.put(simpleKey, value != null ? createDBRef(value, property) : null); } else { - if (conversionService.canConvert(value.getClass(), DocumentPointer.class)) { - document.put(simpleKey, conversionService.convert(value, DocumentPointer.class).getPointer()); - } else { - // just take the id as a reference - document.put(simpleKey, mappingContext.getPersistentEntity(property.getAssociationTargetType()) - .getIdentifierAccessor(value).getIdentifier()); - } + document.put(simpleKey, documentPointerFactory.computePointer(mappingContext, property, value, property.getActualType()).getPointer()); } } else { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index fa1deb4f1c..d6bcc10e49 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -32,6 +32,7 @@ import java.util.Map; import org.bson.Document; +import org.bson.types.ObjectId; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -44,6 +45,8 @@ import org.springframework.data.mongodb.core.mapping.DocumentPointer; import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.test.util.Client; import org.springframework.data.mongodb.test.util.MongoClientExtension; @@ -106,6 +109,26 @@ void writeSimpleTypeReference() { assertThat(target.get("simpleValueRef")).isEqualTo("ref-1"); } + @Test // GH-3782 + void writeTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.customIdTargetRef = new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), + "me-the-referenced-object"); + + template.save(source); + + Document target = 
template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRef")).isEqualTo(expectedIdValue); + } + @Test // GH-3602 void writeMapTypeReference() { @@ -126,6 +149,26 @@ void writeMapTypeReference() { assertThat(target.get("mapValueRef", Map.class)).containsEntry("frodo", "ref-1").containsEntry("bilbo", "ref-2"); } + @Test // GH-3782 + void writeMapOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefMap = Collections.singletonMap("frodo", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefMap", Map.class)).containsEntry("frodo", expectedIdValue); + } + @Test // GH-3602 void writeCollectionOfSimpleTypeReference() { @@ -145,6 +188,26 @@ void writeCollectionOfSimpleTypeReference() { assertThat(target.get("simpleValueRef", List.class)).containsExactly("ref-1", "ref-2"); } + @Test // GH-3782 + void writeListOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefList = Collections.singletonList( + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefList", List.class)).containsExactly(expectedIdValue); + } + @Test // GH-3602 void writeObjectTypeReference() { @@ -739,6 +802,26 @@ void updateReferenceWithValue() { assertThat(target).containsEntry("toB", "b"); } + @Test // GH-3782 + void updateReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot root = new SingleRefRoot(); + root.id = "root-1"; + template.save(root); + + template.update(SingleRefRoot.class).apply(new Update().set("customIdTargetRef", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "b"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("customIdTargetRef", expectedIdValue); + } + @Test // GH-3602 void updateReferenceCollectionWithEntity() { @@ -998,6 +1081,8 @@ static class SingleRefRoot { @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }", lazy = true) // ObjectRefOnNonIdField lazyObjectValueRefOnNonIdFields; + + @DocumentReference ObjectRefHavingCustomizedIdTargetType customIdTargetRef; } @Data @@ -1027,6 +1112,10 @@ static class CollectionRefRoot { @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // List objectValueRefOnNonIdFields; + + @DocumentReference List customIdTargetRefList; + + 
@DocumentReference Map customIdTargetRefMap; } @FunctionalInterface @@ -1094,6 +1183,14 @@ public Object toReference() { } } + @Data + @AllArgsConstructor + static class ObjectRefHavingCustomizedIdTargetType { + + @MongoId(targetType = FieldType.OBJECT_ID) String id; + String name; + } + static class ReferencableConverter implements Converter> { @Nullable @@ -1196,5 +1293,4 @@ static class UsingAtReference { @Reference // Publisher publisher; } - } From f24e8e5361bf02484e20dc7799bc2a13b808873e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 1 Sep 2021 10:39:36 +0200 Subject: [PATCH 074/885] Avoid nested Document conversion to primitive types for fields with an explicit write target. We now no longer attempt to convert query Documents into primitive types to avoid e.g. Document to String conversion. Closes: #3783 Original Pull Request: #3797 --- .../mongodb/core/convert/QueryMapper.java | 3 +- .../core/convert/QueryMapperUnitTests.java | 31 +++++++++++++++++-- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 7a14f07c4c..e7deb38231 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -778,7 +778,8 @@ protected boolean isKeyword(String candidate) { @Nullable private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) { - if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) { + if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget() + || value instanceof Document || value instanceof DBObject) { return value; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index efd354b866..808263697a 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -33,8 +33,7 @@ import org.bson.types.ObjectId; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.Transient; @@ -83,9 +82,12 @@ public class QueryMapperUnitTests { @BeforeEach void beforeEach() { + MongoCustomConversions conversions = new MongoCustomConversions(); this.context = new MongoMappingContext(); + this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.setCustomConversions(conversions); this.converter.afterPropertiesSet(); this.mapper = new QueryMapper(converter); @@ -1335,6 +1337,25 @@ void mapStringIdFieldProjection() { assertThat(mappedFields).containsEntry("_id", 1); } + @Test // GH-3783 + void retainsId$InWithStringArray() { + + org.bson.Document mappedQuery = 
mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"), + context.getPersistentEntity(WithExplicitStringId.class)); + assertThat(mappedQuery.get("_id")).isEqualTo(org.bson.Document.parse("{ $in: [\"5b8bedceb1e0bfc07b008828\"]}")); + } + + @Test // GH-3783 + void mapsId$InInToObjectIds() { + + org.bson.Document mappedQuery = mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"), + context.getPersistentEntity(ClassWithDefaultId.class)); + assertThat(mappedQuery.get("_id")) + .isEqualTo(org.bson.Document.parse("{ $in: [ {$oid: \"5b8bedceb1e0bfc07b008828\" } ]}")); + } + class WithDeepArrayNesting { List level0; @@ -1404,6 +1425,12 @@ class WithStringId { String name; } + class WithExplicitStringId { + + @MongoId(FieldType.STRING) String id; + String name; + } + class BigIntegerId { @Id private BigInteger id; From e71ec874ab69ecc3cfd199be5ce9cda76686913e Mon Sep 17 00:00:00 2001 From: divyajnu08 Date: Sun, 29 Aug 2021 16:41:52 +0530 Subject: [PATCH 075/885] Add support for `$expr` operator. Also, allow construction of $match with an AggregationExpression. Closes #3790 --- .../core/aggregation/AddFieldsOperation.java | 1 + .../mongodb/core/aggregation/Aggregation.java | 12 +- .../core/aggregation/EvaluationOperators.java | 109 ++++++++++++++++++ .../core/aggregation/MatchOperation.java | 39 ++++++- .../aggregation/ReplaceRootOperation.java | 1 + .../core/aggregation/SetOperation.java | 1 + .../aggregation/MatchOperationUnitTests.java | 26 +++++ .../aggregation/SetOperationUnitTests.java | 1 + 8 files changed, 187 insertions(+), 3 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java index 3f3dd125d1..90cc828591 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java @@ -201,4 +201,5 @@ public interface ValueAppender { AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values); } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index cecc8f2554..55964bab93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -498,7 +498,17 @@ public static MatchOperation match(Criteria criteria) { public static MatchOperation match(CriteriaDefinition criteria) { return new MatchOperation(criteria); } - + + /** + * Creates a new {@link MatchOperation} + * + * @return new instance of {@link MatchOperation}. + * @since 1.10 + */ + public static MatchOperation match() { + return new MatchOperation(); + } + /** * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. 
The * {@code distanceField} defines output field that contains the calculated distance. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java new file mode 100644 index 0000000000..0fb8e25fab --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java @@ -0,0 +1,109 @@ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.util.Assert; + +public class EvaluationOperators { + + /** + * Take the value resulting from the given fieldReference. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. + */ + public static EvaluationOperatorFactory valueOf(String fieldReference) { + return new EvaluationOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. + */ + public static EvaluationOperatorFactory valueOf(AggregationExpression expression) { + return new EvaluationOperatorFactory(expression); + } + + public static class EvaluationOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public EvaluationOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + this.fieldReference = fieldReference; + this.expression = null; + } + + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public EvaluationOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that is a valid aggregation expression. + * + * @return new instance of {@link Expr}. + */ + public Expr expr() { + return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression); + } + + + public static class Expr extends AbstractAggregationExpression { + + private Expr(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$expr"; + } + + /** + * Creates new {@link Expr}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Expr}. + */ + public static Expr valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new Expr(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Expr}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Expr}. 
+ */ + public static Expr valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return new Expr(expression); + } + + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java index c9d83ae6c8..c2796aaa03 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.EvaluationOperatorFactory.Expr; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; @@ -36,7 +37,16 @@ public class MatchOperation implements AggregationOperation { private final CriteriaDefinition criteriaDefinition; - + private final AggregationExpression expression; + + /** + * Creates a new {@link MatchOperation} + */ + public MatchOperation() { + this.criteriaDefinition = null; + this.expression = null; + } + /** * Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}. * @@ -46,14 +56,39 @@ public MatchOperation(CriteriaDefinition criteriaDefinition) { Assert.notNull(criteriaDefinition, "Criteria must not be null!"); this.criteriaDefinition = criteriaDefinition; + this.expression = null; } - + + /** + * Creates a new {@link MatchOperation} for the given {@link Expression}. + * + * @param criteriaDefinition must not be {@literal null}. + */ + private MatchOperation(Expr expression) { + Assert.notNull(expression, "Expression must not be null!"); + this.criteriaDefinition = null; + this.expression = expression; + } + + /** + * Creates a new {@link MatchOperation} for the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. 
+ */ + public MatchOperation withValueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new MatchOperation(EvaluationOperators.valueOf(expression).expr()); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) */ @Override public Document toDocument(AggregationOperationContext context) { + if(expression != null) { + return new Document(getOperator(), expression.toDocument()); + } return new Document(getOperator(), context.getMappedObject(criteriaDefinition.getCriteriaObject())); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java index c452ffb8ea..94f9785595 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java @@ -21,6 +21,7 @@ import java.util.List; import org.bson.Document; + import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.expression.spel.ast.Projection; import org.springframework.util.Assert; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java index 731668ed3c..d065f81662 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java @@ -193,5 +193,6 @@ public interface ValueAppender { */ SetOperation withValueOfExpression(String operation, Object... values); } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java new file mode 100644 index 0000000000..04d3824de1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java @@ -0,0 +1,26 @@ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +class MatchOperationUnitTests { + + @Test // DATAMONGO - 3729 + public void shouldRenderStdDevPopCorrectly() { + MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevPop()); + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). + isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevPop\" : \"$quiz\" } } } ")); + + } + + @Test // DATAMONGO - 3729 + public void shouldRenderStdDevSampCorrectly() { + MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevSamp()); + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). 
+ isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevSamp\" : \"$quiz\" } } } ")); + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java index b90b049da1..8fd8bd5526 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java @@ -21,6 +21,7 @@ import org.bson.Document; import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.convert.QueryMapper; From 34d66a276ac35a24f076d565543803e4392c5880 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 6 Sep 2021 15:07:02 +0200 Subject: [PATCH 076/885] Polishing. Add license headers. Update Javadoc, author, and since tags. Add tests. Add toCriteriaDefinition method. See #3790 --- .../mongodb/core/aggregation/Aggregation.java | 13 ++-- .../core/aggregation/EvaluationOperators.java | 66 ++++++++++++++++--- .../core/aggregation/MatchOperation.java | 44 +++++-------- .../EvaluationOperatorsUnitTests.java | 35 ++++++++++ .../aggregation/MatchOperationUnitTests.java | 29 ++++---- .../ReplaceRootOperationUnitTests.java | 16 ++--- .../ReplaceWithOperationUnitTests.java | 8 +-- .../core/convert/QueryMapperUnitTests.java | 18 +++++ 8 files changed, 156 insertions(+), 73 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 55964bab93..614489692c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -498,17 +498,18 @@ public static MatchOperation match(Criteria criteria) { public static MatchOperation match(CriteriaDefinition criteria) { return new MatchOperation(criteria); } - + /** - * Creates a new {@link MatchOperation} + * Creates a new {@link MatchOperation} using the given {@link AggregationExpression}. * + * @param expression must not be {@literal null}. * @return new instance of {@link MatchOperation}. - * @since 1.10 + * @since 3.3 */ - public static MatchOperation match() { - return new MatchOperation(); + public static MatchOperation match(AggregationExpression expression) { + return new MatchOperation(expression); } - + /** * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The * {@code distanceField} defines output field that contains the calculated distance. 
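A brief usage sketch of the reworked match(AggregationExpression) factory; the field names `spent` and `budget` are illustrative assumptions, not taken from this change:

import org.springframework.data.mongodb.core.aggregation.Aggregation;
import org.springframework.data.mongodb.core.aggregation.ComparisonOperators;
import org.springframework.data.mongodb.core.aggregation.EvaluationOperators;
import org.springframework.data.mongodb.core.aggregation.MatchOperation;

class ExprMatchSketch {

	MatchOperation overBudget() {

		// assumed field names "spent" and "budget"; expected to render
		// { "$match" : { "$expr" : { "$gt" : [ "$spent", "$budget" ] } } }
		return Aggregation.match(
				EvaluationOperators.valueOf(ComparisonOperators.valueOf("spent").greaterThan("budget")).expr());
	}
}
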
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java index 0fb8e25fab..181bab5ef5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java @@ -1,9 +1,33 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; + +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; +/** + * Gateway to {@literal evaluation operators} such as {@literal $expr}. + * + * @author Divya Srivastava + * @since 3.3 + */ public class EvaluationOperators { - + /** * Take the value resulting from the given fieldReference. * @@ -13,7 +37,7 @@ public class EvaluationOperators { public static EvaluationOperatorFactory valueOf(String fieldReference) { return new EvaluationOperatorFactory(fieldReference); } - + /** * Take the value resulting from the given {@link AggregationExpression}. * @@ -23,12 +47,12 @@ public static EvaluationOperatorFactory valueOf(String fieldReference) { public static EvaluationOperatorFactory valueOf(AggregationExpression expression) { return new EvaluationOperatorFactory(expression); } - + public static class EvaluationOperatorFactory { - + private final String fieldReference; private final AggregationExpression expression; - + /** * Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}. * @@ -41,7 +65,6 @@ public EvaluationOperatorFactory(String fieldReference) { this.expression = null; } - /** * Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}. * @@ -53,7 +76,7 @@ public EvaluationOperatorFactory(AggregationExpression expression) { this.fieldReference = null; this.expression = expression; } - + /** * Creates new {@link AggregationExpression} that is a valid aggregation expression. * @@ -62,8 +85,10 @@ public EvaluationOperatorFactory(AggregationExpression expression) { public Expr expr() { return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression); } - - + + /** + * Allows the use of aggregation expressions within the query language. + */ public static class Expr extends AbstractAggregationExpression { private Expr(Object value) { @@ -99,8 +124,29 @@ public static Expr valueOf(AggregationExpression expression) { return new Expr(expression); } + /** + * Creates {@code $expr} as {@link CriteriaDefinition}. + * + * @return the {@link CriteriaDefinition} from this expression. 
+ */ + public CriteriaDefinition toCriteriaDefinition(AggregationOperationContext context) { + + Document criteriaObject = toDocument(context); + + return new CriteriaDefinition() { + @Override + public Document getCriteriaObject() { + return criteriaObject; + } + + @Override + public String getKey() { + return getMongoMethod(); + } + }; + } } - + private boolean usesFieldRef() { return fieldReference != null; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java index c2796aaa03..c3d1f366ec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; -import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.EvaluationOperatorFactory.Expr; + import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; @@ -30,6 +30,7 @@ * @author Sebastian Herold * @author Thomas Darimont * @author Oliver Gierke + * @author Divya Srivastava * @since 1.3 * @see MongoDB Aggregation Framework: * $match @@ -38,15 +39,7 @@ public class MatchOperation implements AggregationOperation { private final CriteriaDefinition criteriaDefinition; private final AggregationExpression expression; - - /** - * Creates a new {@link MatchOperation} - */ - public MatchOperation() { - this.criteriaDefinition = null; - this.expression = null; - } - + /** * Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}. * @@ -55,41 +48,34 @@ public MatchOperation() { public MatchOperation(CriteriaDefinition criteriaDefinition) { Assert.notNull(criteriaDefinition, "Criteria must not be null!"); + this.criteriaDefinition = criteriaDefinition; this.expression = null; } - - /** - * Creates a new {@link MatchOperation} for the given {@link Expression}. - * - * @param criteriaDefinition must not be {@literal null}. - */ - private MatchOperation(Expr expression) { - Assert.notNull(expression, "Expression must not be null!"); - this.criteriaDefinition = null; - this.expression = expression; - } - + /** * Creates a new {@link MatchOperation} for the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. + * @since 3.3 */ - public MatchOperation withValueOf(AggregationExpression expression) { + public MatchOperation(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new MatchOperation(EvaluationOperators.valueOf(expression).expr()); + + this.criteriaDefinition = null; + this.expression = expression; } - + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) */ @Override public Document toDocument(AggregationOperationContext context) { - if(expression != null) { - return new Document(getOperator(), expression.toDocument()); - } - return new Document(getOperator(), context.getMappedObject(criteriaDefinition.getCriteriaObject())); + + return new Document(getOperator(), + context.getMappedObject(expression != null ? 
expression.toDocument() : criteriaDefinition.getCriteriaObject())); } /* diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java new file mode 100644 index 0000000000..67f5093b8f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link EvaluationOperators}. + * + * @author Mark Paluch + */ +class EvaluationOperatorsUnitTests { + + @Test // GH-3790 + void shouldRenderExprCorrectly() { + + assertThat(EvaluationOperators.valueOf("foo").expr().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $expr: \"$foo\" }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java index 04d3824de1..ec3decb7a8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java @@ -1,26 +1,23 @@ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; -import org.bson.Document; +import static org.springframework.data.mongodb.test.util.Assertions.*; + import org.junit.jupiter.api.Test; +/** + * Unit tests for {@link MatchOperation}. + * + * @author Divya Srivastava + */ class MatchOperationUnitTests { - - @Test // DATAMONGO - 3729 - public void shouldRenderStdDevPopCorrectly() { - MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevPop()); - assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). - isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevPop\" : \"$quiz\" } } } ")); - - } - - @Test // DATAMONGO - 3729 - public void shouldRenderStdDevSampCorrectly() { - MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevSamp()); + + @Test // GH-3790 + void matchShouldRenderCorrectly() { + + MatchOperation operation = Aggregation.match(ArithmeticOperators.valueOf("quiz").stdDevPop()); assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). 
- isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevSamp\" : \"$quiz\" } } } ")); - + isEqualTo("{ $match: { \"$stdDevPop\" : \"$quiz\" } } "); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java index e97e1ff018..9fbc36586f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java @@ -27,20 +27,20 @@ * * @author Mark Paluch */ -public class ReplaceRootOperationUnitTests { +class ReplaceRootOperationUnitTests { @Test // DATAMONGO-1550 - public void rejectsNullField() { + void rejectsNullField() { assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((Field) null)); } @Test // DATAMONGO-1550 - public void rejectsNullExpression() { + void rejectsNullExpression() { assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((AggregationExpression) null)); } @Test // DATAMONGO-1550 - public void shouldRenderCorrectly() { + void shouldRenderCorrectly() { ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder() .withDocument(new Document("hello", "world")); @@ -50,7 +50,7 @@ public void shouldRenderCorrectly() { } @Test // DATAMONGO-1550 - public void shouldRenderExpressionCorrectly() { + void shouldRenderExpressionCorrectly() { ReplaceRootOperation operation = new ReplaceRootOperation(VariableOperators // .mapItemsOf("array") // @@ -64,7 +64,7 @@ public void shouldRenderExpressionCorrectly() { } @Test // DATAMONGO-1550 - public void shouldComposeDocument() { + void shouldComposeDocument() { ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder().withDocument() // .andValue("value").as("key") // @@ -77,7 +77,7 @@ public void shouldComposeDocument() { } @Test // DATAMONGO-1550 - public void shouldComposeSubDocument() { + void shouldComposeSubDocument() { Document partialReplacement = new Document("key", "override").append("key2", "value2"); @@ -92,7 +92,7 @@ public void shouldComposeSubDocument() { } @Test // DATAMONGO-1550 - public void shouldNotExposeFields() { + void shouldNotExposeFields() { ReplaceRootOperation operation = new ReplaceRootOperation(Fields.field("field")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java index 8f8b5c9dd1..d1a21a254c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java @@ -25,15 +25,15 @@ * * @author Christoph Strobl */ -public class ReplaceWithOperationUnitTests { +class ReplaceWithOperationUnitTests { @Test // DATAMONGO-2331 - public void rejectsNullField() { + void rejectsNullField() { assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceWithOperation(null)); } @Test // DATAMONGO-2331 - public void shouldRenderValueCorrectly() { + void shouldRenderValueCorrectly() { ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValue(new Document("hello", 
"world")); Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -42,7 +42,7 @@ public void shouldRenderValueCorrectly() { } @Test // DATAMONGO-2331 - public void shouldRenderExpressionCorrectly() { + void shouldRenderExpressionCorrectly() { ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValueOf(VariableOperators // .mapItemsOf("array") // diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 808263697a..46db6e7d6a 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -43,6 +43,9 @@ import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.geo.GeoJsonPolygon; import org.springframework.data.mongodb.core.mapping.DBRef; @@ -1330,6 +1333,21 @@ void allowsUsingFieldPathsForPropertiesHavingCustomConversionRegistered() { assertThat(mapper.getMappedSort(query.getQueryObject(), context.getPersistentEntity(Customer.class))).isEqualTo(new org.bson.Document("address.street", "1007 Mountain Drive")); } + @Test // GH-3790 + void shouldAcceptExprAsCriteriaDefinition() { + + EvaluationOperators.EvaluationOperatorFactory.Expr expr = EvaluationOperators + .valueOf(ConditionalOperators.ifNull("customizedField").then(true)).expr(); + + Query query = query( + expr.toCriteriaDefinition(new TypeBasedAggregationOperationContext(EmbeddedClass.class, context, mapper))); + + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getRequiredPersistentEntity(EmbeddedClass.class)); + + assertThat(mappedQuery).isEqualTo("{ $expr : { $ifNull : [\"$fancy_custom_name\", true] } }"); + } + @Test // GH-3668 void mapStringIdFieldProjection() { From ffceed8da96bb2b83206a87440bebc8d30687c10 Mon Sep 17 00:00:00 2001 From: divya srivastava Date: Sun, 29 Aug 2021 19:07:14 +0530 Subject: [PATCH 077/885] Add support for `$atan`, `$atan2` and `$atanh` aggregation operators. Closes #3709 Original pull request: #3794. 
--- .../core/aggregation/ArithmeticOperators.java | 258 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 3 + .../data/mongodb/util/RegexFlags.java | 2 +- .../ArithmeticOperatorsUnitTests.java | 22 ++ .../SpelExpressionTransformerUnitTests.java | 15 + 5 files changed, 299 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 8fe3d9120c..bf10488f99 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -790,6 +790,68 @@ public Cosh cosh(AngularUnit unit) { public Tan tan() { return tan(AngularUnit.RADIANS); } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. + * + * @return new instance of {@link ATan}. + */ + public ATan atan() { + return usesFieldRef() ? ATan.atanOf(fieldReference) : ATan.atanOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value + * divided by the given numeric value in the argument. + * + * @param the numeric value + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2(Number value) { + + Assert.notNull(value, "Value must not be null!"); + return createATan2().atan2of(value); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value + * divided by the given field reference in the argument. + * + * @param the numeric value + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return createATan2().atan2of(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value + * divided by the given {@link AggregationExpression} in the argument. + * + * @param the numeric value + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return createATan2().atan2of(expression); + } + + private ATan2 createATan2() { + + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value. + * + * @return new instance of {@link ATanh}. + */ + public ATanh atanh() { + return usesFieldRef() ? ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); + } /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given @@ -2579,6 +2641,148 @@ protected String getMongoMethod() { return "$tan"; } } + + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. + * + */ + public static class ATan extends AbstractAggregationExpression { + + private ATan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan}. 
+ */ + public static ATan atanOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ATan(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + *

+ * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(AggregationExpression expression) { + return new ATan(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanof(Number value) { + return new ATan(value); + } + + @Override + protected String getMongoMethod() { + return "$atan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse + * tangent of y / x, where y and x are the first and second values passed to the + * expression respectively. + * + */ + public static class ATan2 extends AbstractAggregationExpression { + + private ATan2(List value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ATan2(asFields(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves + * to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return new ATan2((Collections.singletonList(expression))); + } + + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param value anything ({@link Field field}, {@link AggregationExpression + * expression}, ...) that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ATan2(append(Fields.field(fieldReference))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return new ATan2(append(expression)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param value of type {@link Number} + * @return new instance of {@link ATan2}. 
+ */ + public ATan2 atan2of(Number value) { + + return new ATan2(append(value)); + } + + @Override + protected String getMongoMethod() { + return "$atan2"; + } + } /** * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in @@ -2684,6 +2888,60 @@ protected String getMongoMethod() { return "$tanh"; } } + + /** + * An {@link AggregationExpression expression} that calculates the inverse + * hyperbolic tangent of a value + * + */ + public static class ATanh extends AbstractAggregationExpression { + + private ATanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a + * numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(String fieldReference) { + return new ATanh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + *

+ * + * @param expression the {@link AggregationExpression expression} that resolves + * to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(AggregationExpression expression) { + return new ATanh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression + * expression}, ...) that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhof(Object value) { + return new ATanh(value); + } + + @Override + protected String getMongoMethod() { + return "$atanh"; + } + } /** * {@link Rand} returns a floating value between 0 and 1. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 0fbfe51f09..0f27c463e2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -100,6 +100,9 @@ public class MethodReferenceNode extends ExpressionNode { map.put("tan", singleArgRef().forOperator("$tan")); map.put("tanh", singleArgRef().forOperator("$tanh")); map.put("rand", emptyRef().forOperator("$rand")); + map.put("atan", singleArgRef().forOperator("$atan")); + map.put("atan2", arrayArgRef().forOperator("$atan2")); + map.put("atanh", singleArgRef().forOperator("$atanh")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java index dfee94954c..ba6531e93c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java @@ -113,4 +113,4 @@ public static int toRegexFlag(char c) { return flag; } -} +} \ No newline at end of file diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 02f76d5c10..8a52a8a2f5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -166,6 +166,28 @@ void rendersTanhWithValueInDegrees() { assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } + + @Test // DATAMONGO - 3709 + void rendersATan() { + + assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atan : \"$field\" }"); + } + + @Test // DATAMONGO - 3709 + void rendersATan2() { + + assertThat(valueOf("field1").atan2("field2").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); + } + + @Test // DATAMONGO - 3709 + void rendersATanh() { + + assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atanh 
: \"$field\" }"); + } + @Test // GH-3724 void rendersRand() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index e92ea38336..c9ba9c12e7 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1098,6 +1098,21 @@ void shouldRenderTan() { void shouldRenderTanh() { assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } + + @Test // DATAMONGO - 3709 + void shouldRenderATan() { + assertThat(transform("atan(number)")).isEqualTo("{ \"$atan\" : \"$number\"}"); + } + + @Test // DATAMONGO - 3709 + void shouldRenderATan2() { + assertThat(transform("atan2(number1,number2)")).isEqualTo("{ \"$atan2\" : [ \"$number1\" , \"$number2\" ] }"); + } + + @Test // DATAMONGO - 3709 + void shouldRenderATanh() { + assertThat(transform("atanh(number)")).isEqualTo("{ \"$atanh\" : \"$number\"}"); + } @Test // GH-3713 void shouldRenderDateAdd() { From 8af904b81fb190e6b9663629daff76b11ed8028f Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 6 Sep 2021 15:46:07 +0200 Subject: [PATCH 078/885] Polishing. Add author and since tags. Tweak Javadoc format. See #3709 Original pull request: #3794. --- .../core/aggregation/ArithmeticOperators.java | 155 +++++++++--------- .../ArithmeticOperatorsUnitTests.java | 46 +++--- .../reference/aggregation-framework.adoc | 2 +- 3 files changed, 93 insertions(+), 110 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index bf10488f99..d21d985882 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -717,7 +717,7 @@ public Sin sin(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Sinh}. * @since 3.3 */ public Sinh sinh() { @@ -728,7 +728,7 @@ public Sinh sinh() { * Creates new {@link AggregationExpression} that calculates the sine of a numeric value. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Sinh}. * @since 3.3 */ public Sinh sinh(AngularUnit unit) { @@ -739,7 +739,7 @@ public Sinh sinh(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Cos}. * @since 3.3 */ public Cos cos() { @@ -751,7 +751,7 @@ public Cos cos() { * {@link AngularUnit unit}. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Cos}. 
* @since 3.3 */ public Cos cos(AngularUnit unit) { @@ -762,7 +762,7 @@ public Cos cos(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Cosh}. * @since 3.3 */ public Cosh cosh() { @@ -773,7 +773,7 @@ public Cosh cosh() { * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Cosh}. * @since 3.3 */ public Cosh cosh(AngularUnit unit) { @@ -784,70 +784,75 @@ public Cosh cosh(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Tan}. * @since 3.3 */ public Tan tan() { return tan(AngularUnit.RADIANS); } - + /** * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. * * @return new instance of {@link ATan}. + * @since 3.3 */ public ATan atan() { return usesFieldRef() ? ATan.atanOf(fieldReference) : ATan.atanOf(expression); } - + /** - * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value - * divided by the given numeric value in the argument. + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given numeric value in the argument. * - * @param the numeric value + * @param the numeric value * @return new instance of {@link ATan2}. + * @since 3.3 */ public ATan2 atan2(Number value) { - + Assert.notNull(value, "Value must not be null!"); return createATan2().atan2of(value); } - + /** - * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value - * divided by the given field reference in the argument. + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given field reference in the argument. * - * @param the numeric value + * @param the numeric value * @return new instance of {@link ATan2}. + * @since 3.3 */ public ATan2 atan2(String fieldReference) { - + Assert.notNull(fieldReference, "FieldReference must not be null!"); return createATan2().atan2of(fieldReference); } - + /** - * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value - * divided by the given {@link AggregationExpression} in the argument. + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given {@link AggregationExpression} in the argument. * - * @param the numeric value + * @param the numeric value * @return new instance of {@link ATan2}. + * @since 3.3 */ public ATan2 atan2(AggregationExpression expression) { - + Assert.notNull(expression, "Expression must not be null!"); return createATan2().atan2of(expression); } - + private ATan2 createATan2() { - + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); } - + /** * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value. * * @return new instance of {@link ATanh}. + * @since 3.3 */ public ATanh atanh() { return usesFieldRef() ? 
ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); @@ -858,7 +863,7 @@ public ATanh atanh() { * {@link AngularUnit unit}. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Tan}. * @since 3.3 */ public Tan tan(AngularUnit unit) { @@ -869,7 +874,7 @@ public Tan tan(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Tan}. * @since 3.3 */ public Tanh tanh() { @@ -880,7 +885,7 @@ public Tanh tanh() { * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Tanh}. * @since 3.3 */ public Tanh tanh(AngularUnit unit) { @@ -2357,8 +2362,6 @@ private Cos(Object value) { * { $cos : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Cos}. */ @@ -2470,8 +2473,6 @@ public static Cosh coshOf(String fieldReference) { * { $cosh : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cosh}. @@ -2563,8 +2564,6 @@ private Tan(Object value) { * { $tan : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Tan}. */ @@ -2641,11 +2640,12 @@ protected String getMongoMethod() { return "$tan"; } } - - + /** * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. * + * @author Divya Srivastava + * @since 3.3 */ public static class ATan extends AbstractAggregationExpression { @@ -2660,14 +2660,13 @@ private ATan(Object value) { * @return new instance of {@link ATan}. */ public static ATan atanOf(String fieldReference) { - + Assert.notNull(fieldReference, "FieldReference must not be null!"); return new ATan(Fields.field(fieldReference)); } /** * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. - *

* * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATan}. @@ -2683,7 +2682,7 @@ public static ATan atanOf(AggregationExpression expression) { * numeric value. * @return new instance of {@link ATan}. */ - public static ATan atanof(Number value) { + public static ATan atanOf(Number value) { return new ATan(value); } @@ -2692,26 +2691,25 @@ protected String getMongoMethod() { return "$atan"; } } - + /** - * An {@link AggregationExpression expression} that calculates the inverse - * tangent of y / x, where y and x are the first and second values passed to the - * expression respectively. + * An {@link AggregationExpression expression} that calculates the inverse tangent of y / x, where y and x are the + * first and second values passed to the expression respectively. * + * @author Divya Srivastava + * @since 3.3 */ public static class ATan2 extends AbstractAggregationExpression { - + private ATan2(List value) { super(value); } /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. - * - * @param fieldReference the name of the {@link Field field} that resolves to a - * numeric value. + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link ATan2}. */ public static ATan2 valueOf(String fieldReference) { @@ -2721,12 +2719,10 @@ public static ATan2 valueOf(String fieldReference) { } /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. - * - * @param expression the {@link AggregationExpression expression} that resolves - * to a numeric value. + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATan2}. */ public static ATan2 valueOf(AggregationExpression expression) { @@ -2737,12 +2733,11 @@ public static ATan2 valueOf(AggregationExpression expression) { /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. - * - * @param value anything ({@link Field field}, {@link AggregationExpression - * expression}, ...) that resolves to a numeric value. + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. * @return new instance of {@link ATan2}. 
*/ public ATan2 atan2of(String fieldReference) { @@ -2750,7 +2745,7 @@ public ATan2 atan2of(String fieldReference) { Assert.notNull(fieldReference, "FieldReference must not be null!"); return new ATan2(append(Fields.field(fieldReference))); } - + /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in * {@link AngularUnit#RADIANS}. @@ -2760,21 +2755,20 @@ public ATan2 atan2of(String fieldReference) { * @return new instance of {@link ATan2}. */ public ATan2 atan2of(AggregationExpression expression) { - + Assert.notNull(expression, "Expression must not be null!"); return new ATan2(append(expression)); } - + /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. - * + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * * @param value of type {@link Number} * @return new instance of {@link ATan2}. */ public ATan2 atan2of(Number value) { - + return new ATan2(append(value)); } @@ -2818,8 +2812,6 @@ public static Tanh tanhOf(String fieldReference) { * { $tanh : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tanh}. @@ -2888,11 +2880,12 @@ protected String getMongoMethod() { return "$tanh"; } } - + /** - * An {@link AggregationExpression expression} that calculates the inverse - * hyperbolic tangent of a value + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic tangent of a value * + * @author Divya Srivastava + * @since 3.3 */ public static class ATanh extends AbstractAggregationExpression { @@ -2913,12 +2906,10 @@ public static ATanh atanhOf(String fieldReference) { } /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * hyperbolic tangent of a value. + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. *

- * - * @param expression the {@link AggregationExpression expression} that resolves - * to a numeric value. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATanh}. */ public static ATanh atanhOf(AggregationExpression expression) { @@ -2933,7 +2924,7 @@ public static ATanh atanhOf(AggregationExpression expression) { * expression}, ...) that resolves to a numeric value. * @return new instance of {@link ATanh}. */ - public static ATanh atanhof(Object value) { + public static ATanh atanhOf(Object value) { return new ATanh(value); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 8a52a8a2f5..84d228f75e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -30,6 +30,7 @@ * @author Christoph Strobl * @author Mark Paluch * @author Mushtaq Ahmed + * @author Divya Srivastava */ class ArithmeticOperatorsUnitTests { @@ -86,8 +87,7 @@ void rendersIntegralWithUnit() { @Test // GH-3728 void rendersSin() { - assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $sin : \"$angle\" }"); + assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sin : \"$angle\" }"); } @Test // GH-3728 @@ -100,8 +100,7 @@ void rendersSinWithValueInDegrees() { @Test // GH-3728 void rendersSinh() { - assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $sinh : \"$angle\" }"); + assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sinh : \"$angle\" }"); } @Test // GH-3728 @@ -114,8 +113,7 @@ void rendersSinhWithValueInDegrees() { @Test // GH-3710 void rendersCos() { - assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $cos : \"$angle\" }"); + assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cos : \"$angle\" }"); } @Test // GH-3710 @@ -128,8 +126,7 @@ void rendersCosWithValueInDegrees() { @Test // GH-3710 void rendersCosh() { - assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $cosh : \"$angle\" }"); + assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cosh : \"$angle\" }"); } @Test // GH-3710 @@ -142,8 +139,7 @@ void rendersCoshWithValueInDegrees() { @Test // GH-3730 void rendersTan() { - assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $tan : \"$angle\" }"); + assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tan : \"$angle\" }"); } @Test // GH-3730 @@ -156,8 +152,7 @@ void rendersTanWithValueInDegrees() { @Test // GH-3730 void rendersTanh() { - assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $tanh : \"$angle\" }"); + assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tanh : \"$angle\" }"); } @Test // GH-3730 @@ -166,28 +161,25 @@ void rendersTanhWithValueInDegrees() { assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) 
.isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } - - @Test // DATAMONGO - 3709 + + @Test // GH-3709 void rendersATan() { - - assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $atan : \"$field\" }"); + + assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atan : \"$field\" }"); } - - @Test // DATAMONGO - 3709 + + @Test // GH-3709 void rendersATan2() { - + assertThat(valueOf("field1").atan2("field2").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); + .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); } - - @Test // DATAMONGO - 3709 + + @Test // GH-3709 void rendersATanh() { - - assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $atanh : \"$field\" }"); - } + assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atanh : \"$field\" }"); + } @Test // GH-3724 void rendersRand() { diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 75ed415096..387a0acf65 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` +| `abs`, `add` (+++*+++ via `plus`), `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` From 59d0042d13a8af35a84a48f992c190bed00006b0 Mon Sep 17 00:00:00 2001 From: divyajnu08 Date: Wed, 1 Sep 2021 13:02:43 +0530 Subject: [PATCH 079/885] Add support for `$asin` and `$asinh` aggregation operators. Closes #3708 Original pull request: #3796. --- .../core/aggregation/ArithmeticOperators.java | 116 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 14 +++ .../SpelExpressionTransformerUnitTests.java | 10 ++ 4 files changed, 142 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index d21d985882..d865d57a7d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -735,6 +735,24 @@ public Sinh sinh(AngularUnit unit) { return usesFieldRef() ? 
Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } + /** + * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. + * + * @return new instance of {@link ASin}. + */ + public ASin asin() { + return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. + * + * @return new instance of {@link ASinh}. + */ + public ASinh asinh() { + return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); + } + /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. @@ -2339,6 +2357,104 @@ protected String getMongoMethod() { return "$sinh"; } } + + /** + * An {@link AggregationExpression expression} that calculates the inverse sine of a value. + * + */ + public static class ASin extends AbstractAggregationExpression { + + private ASin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ASin(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + *

+ * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(AggregationExpression expression) { + return new ASin(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(Number value) { + return new ASin(value); + } + + @Override + protected String getMongoMethod() { + return "$asin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + */ + public static class ASinh extends AbstractAggregationExpression { + + private ASinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(String fieldReference) { + return new ASinh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + *

+ * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(AggregationExpression expression) { + return new ASinh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(Object value) { + return new ASinh(value); + } + + @Override + protected String getMongoMethod() { + return "$asinh"; + } + } + /** * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 0f27c463e2..dc7a3cc982 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -95,6 +95,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); map.put("sin", singleArgRef().forOperator("$sin")); map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("asin", singleArgRef().forOperator("$asin")); + map.put("asinh", singleArgRef().forOperator("$asinh")); map.put("cos", singleArgRef().forOperator("$cos")); map.put("cosh", singleArgRef().forOperator("$cosh")); map.put("tan", singleArgRef().forOperator("$tan")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 84d228f75e..d0f50d2baf 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -109,6 +109,20 @@ void rendersSinhWithValueInDegrees() { assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); } + + @Test // DATAMONGO - 3708 + void rendersASin() { + + assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $asin : \"$field\" }"); + } + + @Test // DATAMONGO - 3708 + void rendersASinh() { + + assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $asinh : \"$field\" }"); + } @Test // GH-3710 void rendersCos() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index c9ba9c12e7..8077f604e5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java 
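(For orientation only; this sketch is not part of the patch.) The new operators are exposed through the
fluent ArithmeticOperators API as well as via SpEL method references, so a projection built against an
assumed field name "ratio" might look like:

    Aggregation aggregation = Aggregation.newAggregation(
            Aggregation.project().and(ArithmeticOperators.valueOf("ratio").asin()).as("angle"));
    // the stage renders as { "$project" : { "angle" : { "$asin" : "$ratio" } } },
    // and the SpEL form "asin(ratio)" maps to { "$asin" : "$ratio" } as exercised below.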
@@ -1078,6 +1078,16 @@ void shouldRenderSin() { void shouldRenderSinh() { assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); } + + @Test // DATAMONGO-3708 + void shouldRenderASin() { + assertThat(transform("asin(number)")).isEqualTo("{ \"$asin\" : \"$number\"}"); + } + + @Test // DATAMONGO-3708 + void shouldRenderASinh() { + assertThat(transform("asinh(number)")).isEqualTo("{ \"$asinh\" : \"$number\"}"); + } @Test // GH-3710 void shouldRenderCos() { From dcf184888e88f1ae4a205df15b04b4b7d63a0880 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 7 Sep 2021 09:56:18 +0200 Subject: [PATCH 080/885] Polishing. Add since and author tags. Update reference docs. Fix format of ticket references in tests. See #3708 Original pull request: #3796. --- .../core/aggregation/ArithmeticOperators.java | 18 ++++++++++++------ .../ArithmeticOperatorsUnitTests.java | 16 ++++++---------- .../SpelExpressionTransformerUnitTests.java | 12 ++++++------ .../reference/aggregation-framework.adoc | 2 +- 4 files changed, 25 insertions(+), 23 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index d865d57a7d..9c9132e679 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -739,20 +739,22 @@ public Sinh sinh(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. * * @return new instance of {@link ASin}. + * @since 3.3 */ public ASin asin() { return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); } - + /** * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. * * @return new instance of {@link ASinh}. + * @since 3.3 */ public ASinh asinh() { return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); } - + /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. @@ -2357,10 +2359,12 @@ protected String getMongoMethod() { return "$sinh"; } } - + /** * An {@link AggregationExpression expression} that calculates the inverse sine of a value. * + * @author Divya Srivastava + * @since 3.3 */ public static class ASin extends AbstractAggregationExpression { @@ -2407,9 +2411,12 @@ protected String getMongoMethod() { return "$asin"; } } - + /** * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + * + * @author Divya Srivastava + * @since 3.3 */ public static class ASinh extends AbstractAggregationExpression { @@ -2430,7 +2437,7 @@ public static ASinh asinhOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. *

- * + * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ASinh}. */ @@ -2884,7 +2891,6 @@ public ATan2 atan2of(AggregationExpression expression) { * @return new instance of {@link ATan2}. */ public ATan2 atan2of(Number value) { - return new ATan2(append(value)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index d0f50d2baf..ab3d1c2400 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -109,19 +109,15 @@ void rendersSinhWithValueInDegrees() { assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); } - - @Test // DATAMONGO - 3708 - void rendersASin() { - assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $asin : \"$field\" }"); + @Test // GH-3708 + void rendersASin() { + assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asin : \"$field\" }"); } - - @Test // DATAMONGO - 3708 - void rendersASinh() { - assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $asinh : \"$field\" }"); + @Test // GH-3708 + void rendersASinh() { + assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asinh : \"$field\" }"); } @Test // GH-3710 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 8077f604e5..899e02a172 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1078,13 +1078,13 @@ void shouldRenderSin() { void shouldRenderSinh() { assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); } - - @Test // DATAMONGO-3708 + + @Test // GH-3708 void shouldRenderASin() { assertThat(transform("asin(number)")).isEqualTo("{ \"$asin\" : \"$number\"}"); } - @Test // DATAMONGO-3708 + @Test // GH-3708 void shouldRenderASinh() { assertThat(transform("asinh(number)")).isEqualTo("{ \"$asinh\" : \"$number\"}"); } @@ -1108,17 +1108,17 @@ void shouldRenderTan() { void shouldRenderTanh() { assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } - + @Test // DATAMONGO - 3709 void shouldRenderATan() { assertThat(transform("atan(number)")).isEqualTo("{ \"$atan\" : \"$number\"}"); } - + @Test // DATAMONGO - 3709 void shouldRenderATan2() { assertThat(transform("atan2(number1,number2)")).isEqualTo("{ \"$atan2\" : [ \"$number1\" , \"$number2\" ] }"); } - + @Test // DATAMONGO - 3709 void shouldRenderATanh() { assertThat(transform("atanh(number)")).isEqualTo("{ \"$atanh\" : \"$number\"}"); diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 
387a0acf65..45315cda36 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` +| `abs`, `add` (+++*+++ via `plus`), `asin`, `asin`, `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` From c8307d5a39d246a245db2866a55cee813edd888d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 7 Sep 2021 11:07:27 +0200 Subject: [PATCH 081/885] Allow one-to-many style lookups with via `@DocumentReference`. This commit adds support for relational style One-To-Many references using a combination of ReadonlyProperty and @DocumentReference. It allows to link types without explicitly storing the linking values within the document itself. @Document class Publisher { @Id ObjectId id; // ... @ReadOnlyProperty @DocumentReference(lookup="{'publisherId':?#{#self._id} }") List books; } Closes: #3798 Original pull request: #3802. --- .../convert/DefaultReferenceResolver.java | 2 +- .../core/convert/DocumentReferenceSource.java | 63 +++++++++++++++++++ .../core/convert/MappingMongoConverter.java | 16 +++-- .../core/convert/ReferenceLookupDelegate.java | 52 +++++++++++---- .../MongoTemplateDocumentReferenceTests.java | 48 ++++++++++++++ .../reference/document-references.adoc | 56 +++++++++++++++++ 6 files changed, 218 insertions(+), 19 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java index f801b8d990..62e713065f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -108,6 +108,6 @@ private Object createLazyLoadingProxy(MongoPersistentProperty property, Object s ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) { return proxyFactory.createLazyLoadingProxy(property, it -> { return referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader); - }, source); + }, source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource)source).getTargetSource() : source); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java new file mode 100644 index 0000000000..03e5eb0d5d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java @@ -0,0 +1,63 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.lang.Nullable; + +/** + * The source object to resolve document references upon. Encapsulates the actual source and the reference specific + * values. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentReferenceSource { + + private final Object self; + + @Nullable private final Object targetSource; + + /** + * Create a new instance of {@link DocumentReferenceSource}. + * + * @param self the entire wrapper object holding references. Must not be {@literal null}. + * @param targetSource the reference value source. + */ + DocumentReferenceSource(Object self, @Nullable Object targetSource) { + + this.self = self; + this.targetSource = targetSource; + } + + /** + * Get the outer document. + * + * @return never {@literal null}. + */ + public Object getSelf() { + return self; + } + + /** + * Get the actual (property specific) reference value. + * + * @return can be {@literal null}. 
+ */ + @Nullable + public Object getTargetSource() { + return targetSource; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index a60c853c33..5a2c3e952a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -38,7 +38,6 @@ import org.bson.types.ObjectId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -524,10 +523,6 @@ private void readAssociation(Association association, P MongoPersistentProperty property = association.getInverse(); Object value = documentAccessor.get(property); - if (value == null) { - return; - } - if (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) { @@ -535,17 +530,26 @@ private void readAssociation(Association association, P if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { + if(value == null) { + return; + } + DocumentPointer pointer = () -> value; // collection like special treatment accessor.setProperty(property, conversionService.convert(pointer, property.getActualType())); } else { + accessor.setProperty(property, - dbRefResolver.resolveReference(property, value, referenceLookupDelegate, context::convert)); + dbRefResolver.resolveReference(property, new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), referenceLookupDelegate, context::convert)); } return; } + if (value == null) { + return; + } + DBRef dbref = value instanceof DBRef ? (DBRef) value : null; accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index 3ca730452f..e16f9024b5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -87,17 +87,20 @@ public ReferenceLookupDelegate( * Read the reference expressed by the given property. * * @param property the reference defining property. Must not be {@literal null}. THe - * @param value the source value identifying to the referenced entity. Must not be {@literal null}. + * @param source the source value identifying to the referenced entity. Must not be {@literal null}. * @param lookupFunction to execute a lookup query. Must not be {@literal null}. * @param entityReader the callback to convert raw source values into actual domain types. Must not be * {@literal null}. * @return can be {@literal null}. 
*/ @Nullable - public Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction, + public Object readReference(MongoPersistentProperty property, Object source, LookupFunction lookupFunction, MongoEntityReader entityReader) { - DocumentReferenceQuery filter = computeFilter(property, value, spELContext); + Object value = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() + : source; + + DocumentReferenceQuery filter = computeFilter(property, source, spELContext); ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext); Iterable result = lookupFunction.apply(filter, referenceCollection); @@ -196,8 +199,16 @@ private T parseValueOrGet(String value, ParameterBindingContext bindingConte ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) { - return new ParameterBindingContext(valueProviderFor(source), spELContext.getParser(), + ValueProvider valueProvider; + if (source instanceof DocumentReferenceSource) { + valueProvider = valueProviderFor(((DocumentReferenceSource) source).getTargetSource()); + } else { + valueProvider = valueProviderFor(source); + } + + return new ParameterBindingContext(valueProvider, spELContext.getParser(), () -> evaluationContextFor(property, source, spELContext)); + } ValueProvider valueProviderFor(Object source) { @@ -212,9 +223,18 @@ ValueProvider valueProviderFor(Object source) { EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) { - EvaluationContext ctx = spELContext.getEvaluationContext(source); - ctx.setVariable("target", source); - ctx.setVariable(property.getName(), source); + Object target = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() + : source; + + if (target == null) { + target = new Document(); + } + + EvaluationContext ctx = spELContext.getEvaluationContext(target); + ctx.setVariable("target", target); + ctx.setVariable("self", + source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getSelf() : source); + ctx.setVariable(property.getName(), target); return ctx; } @@ -223,22 +243,30 @@ EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object * Compute the query to retrieve linked documents. * * @param property must not be {@literal null}. - * @param value must not be {@literal null}. + * @param source must not be {@literal null}. * @param spELContext must not be {@literal null}. * @return never {@literal null}. */ @SuppressWarnings("unchecked") - DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) { + DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object source, SpELContext spELContext) { DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference() : ReferenceEmulatingDocumentReference.INSTANCE; String lookup = documentReference.lookup(); - Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext), + Object value = source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource) source).getTargetSource() + : source; + + Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext), () -> new Document()); - if (property.isCollectionLike() && value instanceof Collection) { + if (property.isCollectionLike() && (value instanceof Collection || value == null)) { + + if (value == null) { + return new ListDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), + sort); + } List ors = new ArrayList<>(); for (Object entry : (Collection) value) { @@ -263,7 +291,7 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object va return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap); } - return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, value, spELContext)), sort); + return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), sort); } enum ReferenceEmulatingDocumentReference implements DocumentReference { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index d6bcc10e49..06d288d1f5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -39,6 +39,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; import org.springframework.data.annotation.Reference; import org.springframework.data.convert.WritingConverter; import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; @@ -1049,7 +1050,34 @@ void updateWhenUsingAtReferenceDirectly() { }); assertThat(target).containsEntry("publisher", "p-1"); + } + + @Test // GH-3798 + void allowsOneToMayStyleLookupsUsingSelfVariable() { + + OneToManyStyleBook book1 = new OneToManyStyleBook(); + book1.id = "id-1"; + book1.publisherId = "p-100"; + + OneToManyStyleBook book2 = new OneToManyStyleBook(); + book2.id = "id-2"; + book2.publisherId = "p-200"; + + OneToManyStyleBook book3 = new OneToManyStyleBook(); + book3.id = "id-3"; + book3.publisherId = "p-100"; + + template.save(book1); + template.save(book2); + template.save(book3); + OneToManyStylePublisher publisher = new OneToManyStylePublisher(); + publisher.id = "p-100"; + + template.save(publisher); + + OneToManyStylePublisher target = template.findOne(query(where("id").is(publisher.id)), OneToManyStylePublisher.class); + assertThat(target.books).containsExactlyInAnyOrder(book1, book3); } @Data @@ -1293,4 +1321,24 @@ static class UsingAtReference { @Reference // Publisher publisher; } + + @Data + static class OneToManyStyleBook { + + @Id + String id; + + private String publisherId; + } + + @Data + static class OneToManyStylePublisher { + + @Id + String id; + + @ReadOnlyProperty + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") + List books; + } } diff --git a/src/main/asciidoc/reference/document-references.adoc b/src/main/asciidoc/reference/document-references.adoc index 885d2d6ade..23bc025e80 100644 --- a/src/main/asciidoc/reference/document-references.adoc +++ 
b/src/main/asciidoc/reference/document-references.adoc @@ -262,6 +262,62 @@ class Publisher { <2> The field value placeholders of the lookup query (like `acc`) is used to form the reference document. ==== +It is also possible to model relational style _One-To-Many_ references using a combination of `@ReadonlyProperty` and `@DocumentReference`. +This approach allows to link types without explicitly storing the linking values within the document itself as shown in the snipped below. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + ObjectId publisherId; <1> +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; + String name; + + @ReadOnlyProperty <2> + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") <3> + List books; +} +---- + +.`Book` document +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisherId" : 8cfb002 +} +---- + +.`Publisher` document +[source,json] +---- +{ + "_id" : 8cfb002, + "acronym" : "DR", + "name" : "Del Rey" +} +---- +<1> Set up the link from `Book` to `Publisher` by storing the `Publisher.id` within the `Book` document. +<2> Mark the property holding the references to be read only. This prevents storing references to individual ``Book``s with the `Publisher` document. +<3> Use the `#self` variable to access values within the `Publisher` document and in this retrieve `Books` with matching `publisherId`. +==== + With all the above in place it is possible to model all kind of associations between entities. Have a look at the non-exhaustive list of samples below to get feeling for what is possible. From 977e5e4c5c877e17a25de14bb47f98f8fa802161 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 13:50:54 +0200 Subject: [PATCH 082/885] Polishing. Tweak reference documentation wording. Extract self/target source dereferencing into utility methods. See: #3798 Original pull request: #3802. --- .../core/convert/DocumentReferenceSource.java | 25 +++++++++++++++++-- .../core/convert/ReferenceLookupDelegate.java | 17 +++---------- .../reference/document-references.adoc | 7 +++--- 3 files changed, 31 insertions(+), 18 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java index 03e5eb0d5d..89d7360e4d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java @@ -28,11 +28,11 @@ public class DocumentReferenceSource { private final Object self; - @Nullable private final Object targetSource; + private final @Nullable Object targetSource; /** * Create a new instance of {@link DocumentReferenceSource}. - * + * * @param self the entire wrapper object holding references. Must not be {@literal null}. * @param targetSource the reference value source. */ @@ -60,4 +60,25 @@ public Object getSelf() { public Object getTargetSource() { return targetSource; } + + /** + * Dereference a {@code targetSource} if it is a {@link DocumentReferenceSource} or return {@code source} otherwise. + * + * @param source + * @return + */ + @Nullable + static Object getTargetSource(Object source) { + return source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource) source).getTargetSource() : source; + } + + /** + * Dereference a {@code self} object if it is a {@link DocumentReferenceSource} or return {@code self} otherwise. + * + * @param self + * @return + */ + static Object getSelf(Object self) { + return self instanceof DocumentReferenceSource ? ((DocumentReferenceSource) self).getSelf() : self; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index e16f9024b5..36ccc23a6b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -174,7 +174,6 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop * @param * @return can be {@literal null}. */ - @Nullable @SuppressWarnings("unchecked") private T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier defaultValue) { @@ -199,16 +198,10 @@ private T parseValueOrGet(String value, ParameterBindingContext bindingConte ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) { - ValueProvider valueProvider; - if (source instanceof DocumentReferenceSource) { - valueProvider = valueProviderFor(((DocumentReferenceSource) source).getTargetSource()); - } else { - valueProvider = valueProviderFor(source); - } + ValueProvider valueProvider = valueProviderFor(DocumentReferenceSource.getTargetSource(source)); return new ParameterBindingContext(valueProvider, spELContext.getParser(), () -> evaluationContextFor(property, source, spELContext)); - } ValueProvider valueProviderFor(Object source) { @@ -232,8 +225,7 @@ EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object EvaluationContext ctx = spELContext.getEvaluationContext(target); ctx.setVariable("target", target); - ctx.setVariable("self", - source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getSelf() : source); + ctx.setVariable("self", DocumentReferenceSource.getSelf(source)); ctx.setVariable(property.getName(), target); return ctx; @@ -255,11 +247,10 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so String lookup = documentReference.lookup(); - Object value = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() - : source; + Object value = DocumentReferenceSource.getTargetSource(source); Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext), - () -> new Document()); + Document::new); if (property.isCollectionLike() && (value instanceof Collection || value == null)) { diff --git a/src/main/asciidoc/reference/document-references.adoc b/src/main/asciidoc/reference/document-references.adoc index 23bc025e80..b7d55678a5 100644 --- a/src/main/asciidoc/reference/document-references.adoc +++ b/src/main/asciidoc/reference/document-references.adoc @@ -263,7 +263,7 @@ class Publisher { ==== It is also possible to model relational style _One-To-Many_ references using a combination of `@ReadonlyProperty` and `@DocumentReference`. -This approach allows to link types without explicitly storing the linking values within the document itself as shown in the snipped below. 
+This approach allows link types without storing the linking values within the owning document but rather on the referencing document as shown in the example below. ==== [source,java] @@ -313,8 +313,9 @@ class Publisher { "name" : "Del Rey" } ---- -<1> Set up the link from `Book` to `Publisher` by storing the `Publisher.id` within the `Book` document. -<2> Mark the property holding the references to be read only. This prevents storing references to individual ``Book``s with the `Publisher` document. +<1> Set up the link from `Book` (reference) to `Publisher` (owner) by storing the `Publisher.id` within the `Book` document. +<2> Mark the property holding the references to be readonly. +This prevents storing references to individual ``Book``s with the `Publisher` document. <3> Use the `#self` variable to access values within the `Publisher` document and in this retrieve `Books` with matching `publisherId`. ==== From ada7e199a4dcbc2cea45d6c0e13d5a9cb8fde7b5 Mon Sep 17 00:00:00 2001 From: Oliver Drotbohm Date: Tue, 7 Sep 2021 14:54:12 +0200 Subject: [PATCH 083/885] Properly detect all supported identifier annotations as explicitly annotated. We now simply delegate to AnnotationBasedPersistentProperty.isIdProperty() for the detection of annotated identifiers. The previous, manual identifier check was preventing additional identifier annotations, supported by ABP, to be considered, too. Fixes #3803. --- spring-data-mongodb/pom.xml | 9 +++++++++ .../mapping/BasicMongoPersistentProperty.java | 3 +-- .../BasicMongoPersistentPropertyUnitTests.java | 15 ++++++++++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 1f157e75bc..2f73c10eba 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -317,6 +317,15 @@ test + + + + org.jmolecules + jmolecules-ddd + ${jmolecules} + test + + diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 87eb56b732..1315757896 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -22,7 +22,6 @@ import org.bson.types.ObjectId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.data.annotation.Id; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty; @@ -115,7 +114,7 @@ public boolean isIdProperty() { */ @Override public boolean isExplicitIdProperty() { - return isAnnotationPresent(Id.class); + return super.isIdProperty(); } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index bbcb8dada0..fffa861914 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -28,9 +28,9 @@ import org.bson.Document; 
import org.bson.types.ObjectId; +import org.jmolecules.ddd.annotation.Identity; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; - import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.Id; import org.springframework.data.mapping.MappingException; @@ -241,6 +241,15 @@ void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTy assertThat(property.getFieldType()).isEqualTo(Document.class); } + @Test + void considersJMoleculesIdentityExplicitlyAnnotatedIdentifier() { + + MongoPersistentProperty property = getPropertyFor(WithJMoleculesIdentity.class, "identifier"); + + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.isExplicitIdProperty()).isTrue(); + } + private MongoPersistentProperty getPropertyFor(Field field) { return getPropertyFor(entity, field); } @@ -369,4 +378,8 @@ static class WithComplexId { @Id @org.springframework.data.mongodb.core.mapping.Field ComplexId id; } + + static class WithJMoleculesIdentity { + @Identity ObjectId identifier; + } } From cba7eaba4c442c426c3cba15be5d2c7073ebdb16 Mon Sep 17 00:00:00 2001 From: Oliver Drotbohm Date: Tue, 7 Sep 2021 14:54:35 +0200 Subject: [PATCH 084/885] Polishing. Formatting and indentation in parent project's pom.xml. See #3803 --- pom.xml | 8 ++++---- .../mapping/BasicMongoPersistentPropertyUnitTests.java | 3 +-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 5d28c8a5c5..4aa47bbf2b 100644 --- a/pom.xml +++ b/pom.xml @@ -141,11 +141,11 @@ sonatype-libs-snapshot https://oss.sonatype.org/content/repositories/snapshots - false - + false + - true - + true + diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index fffa861914..66ae0199fc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -356,8 +356,7 @@ static class DocumentWithComposedAnnotations { @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @Id - static @interface ComposedIdAnnotation { - } + static @interface ComposedIdAnnotation {} static class WithStringMongoId { From 061c28f84ac8c8a302dffc6e3d7264d3703a0f6d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 13:58:37 +0200 Subject: [PATCH 085/885] Polishing. Add ticket reference to tests. 
See #3803 --- .../core/mapping/BasicMongoPersistentPropertyUnitTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index 66ae0199fc..d731854a02 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -241,7 +241,7 @@ void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTy assertThat(property.getFieldType()).isEqualTo(Document.class); } - @Test + @Test // GH-3803 void considersJMoleculesIdentityExplicitlyAnnotatedIdentifier() { MongoPersistentProperty property = getPropertyFor(WithJMoleculesIdentity.class, "identifier"); From 4e960a968288833f6e8ca6a8ce429eef226972a4 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 8 Sep 2021 09:24:15 +0200 Subject: [PATCH 086/885] Fix document reference on empty reference arrays. This commit fixes an issue caused by empty reference arrays. Closes #3805 Original pull request: #3807. --- .../core/convert/ReferenceLookupDelegate.java | 9 ++++- .../MongoTemplateDocumentReferenceTests.java | 39 +++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index 36ccc23a6b..a2726e6338 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -19,6 +19,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -122,7 +123,9 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop // Use the first value as a reference for others in case of collection like if (value instanceof Iterable) { - value = ((Iterable) value).iterator().next(); + + Iterator iterator = ((Iterable) value).iterator(); + value = iterator.hasNext() ? 
iterator.next() : new Document(); } // handle DBRef value @@ -266,6 +269,10 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so ors.add(decoded); } + if(ors.isEmpty()) { + return new ListDocumentReferenceQuery(new Document("_id", new Document("$exists", false)), sort); + } + return new ListDocumentReferenceQuery(new Document("$or", ors), sort); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index 06d288d1f5..2b96b3dc22 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -25,6 +25,7 @@ import lombok.Setter; import lombok.ToString; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; @@ -679,6 +680,41 @@ void loadCollectionReferenceWithMissingRefs() { assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); } + @Test // GH-3805 + void loadEmptyCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference array. + Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedValueRef", + Collections.emptyList()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + + @Test // GH-3805 + void loadNoExistingCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // no reference array at all + Document source = new Document("_id", "id-1").append("value", "v1"); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + @Test // GH-3602 void queryForReference() { @@ -1122,6 +1158,9 @@ static class CollectionRefRoot { @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // List simpleValueRef; + @DocumentReference + List simplePreinitializedValueRef = new ArrayList<>(); + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", sort = "{ '_id' : -1 } ") // List simpleSortedValueRef; From 270456ed81ae3a11d08ec6a3a3bffd8eca9b8d77 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 14:18:17 +0200 Subject: [PATCH 087/885] Polishing. Extract query that yields no hits into constant. Guard Map-typed reference properties against empty $or. See #3805 Original pull request: #3807. 
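(Illustration only; not part of the patch.) MongoDB rejects an empty $or array, which is why an empty
reference collection is translated into a predicate that can never match rather than { "$or" : [] }:

    List<Document> ors = new ArrayList<>();                       // no reference values to look up
    Document filter = ors.isEmpty()
            ? new Document("_id", new Document("$exists", false)) // the no-results predicate, matches nothing
            : new Document("$or", ors);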
--- .../core/convert/ReferenceLookupDelegate.java | 25 +++++++++++++------ .../MongoTemplateDocumentReferenceTests.java | 21 ++++++++++++++++ 2 files changed, 38 insertions(+), 8 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index a2726e6338..dbbdbe99eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -62,6 +62,8 @@ */ public final class ReferenceLookupDelegate { + private static final Document NO_RESULTS_PREDICATE = new Document("_id", new Document("$exists", false)); + private final MappingContext, MongoPersistentProperty> mappingContext; private final SpELContext spELContext; private final ParameterBindingDocumentCodec codec; @@ -262,15 +264,17 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so sort); } - List ors = new ArrayList<>(); - for (Object entry : (Collection) value) { + Collection objects = (Collection) value; - Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); - ors.add(decoded); + if (objects.isEmpty()) { + return new ListDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort); } - if(ors.isEmpty()) { - return new ListDocumentReferenceQuery(new Document("_id", new Document("$exists", false)), sort); + List ors = new ArrayList<>(objects.size()); + for (Object entry : objects) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); + ors.add(decoded); } return new ListDocumentReferenceQuery(new Document("$or", ors), sort); @@ -278,9 +282,14 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so if (property.isMap() && value instanceof Map) { - Map filterMap = new LinkedHashMap<>(); + Set> entries = ((Map) value).entrySet(); + if (entries.isEmpty()) { + return new MapDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort, Collections.emptyMap()); + } + + Map filterMap = new LinkedHashMap<>(entries.size()); - for (Entry entry : ((Map) value).entrySet()) { + for (Entry entry : entries) { Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext)); filterMap.put(entry.getKey(), decoded); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index 2b96b3dc22..c63e7a1115 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -698,6 +698,24 @@ void loadEmptyCollectionReference() { assertThat(result.simplePreinitializedValueRef).isEmpty(); } + @Test // GH-3805 + void loadEmptyMapReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference array. 
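+		// (for this map-typed reference the value below is an empty document rather than an array)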
+ Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedMapRef", + new Document()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedMapRef).isEmpty(); + } + @Test // GH-3805 void loadNoExistingCollectionReference() { @@ -1167,6 +1185,9 @@ static class CollectionRefRoot { @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // Map mapValueRef; + @DocumentReference // + Map simplePreinitializedMapRef = new LinkedHashMap<>(); + @Field("simple-value-ref-annotated-field-name") // @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // List simpleValueRefWithAnnotatedFieldName; From f128e6df152bc559bbae6e07592307d3f3fc402d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 8 Sep 2021 10:29:02 +0200 Subject: [PATCH 088/885] Fix `@DocumentReference` resolution for properties used in constructor. This commit fixes an issue that prevented referenced entities from being used as constructor arguments. Closes: #3806 Original pull request: #3810. --- .../core/convert/MappingMongoConverter.java | 25 +++-- .../MongoTemplateDocumentReferenceTests.java | 106 ++++++++++++++++++ 2 files changed, 124 insertions(+), 7 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 5a2c3e952a..07709df365 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -530,7 +530,7 @@ private void readAssociation(Association association, P if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { - if(value == null) { + if (value == null) { return; } @@ -541,7 +541,9 @@ private void readAssociation(Association association, P } else { accessor.setProperty(property, - dbRefResolver.resolveReference(property, new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), referenceLookupDelegate, context::convert)); + dbRefResolver.resolveReference(property, + new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), + referenceLookupDelegate, context::convert)); } return; } @@ -875,10 +877,12 @@ protected List createCollection(Collection collection, MongoPersisten if (property.isAssociation()) { List targetCollection = collection.stream().map(it -> { - return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()).getPointer(); + return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()) + .getPointer(); }).collect(Collectors.toList()); - return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>()); + return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class), + new ArrayList<>()); } if (property.hasExplicitWriteTarget()) { @@ -931,7 +935,8 @@ protected Bson createMap(Map map, MongoPersistentProperty proper if (property.isDbReference()) { document.put(simpleKey, value != null ? 
createDBRef(value, property) : null); } else { - document.put(simpleKey, documentPointerFactory.computePointer(mappingContext, property, value, property.getActualType()).getPointer()); + document.put(simpleKey, documentPointerFactory + .computePointer(mappingContext, property, value, property.getActualType()).getPointer()); } } else { @@ -1814,6 +1819,11 @@ public T getPropertyValue(MongoPersistentProperty property) { return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler); } + if (property.isDocumentReference()) { + return (T) dbRefResolver.resolveReference(property, accessor.get(property), referenceLookupDelegate, + context::convert); + } + return super.getPropertyValue(property); } } @@ -2036,7 +2046,7 @@ public S convert(Object source, TypeInformation if (typeHint.isMap()) { - if(ClassUtils.isAssignable(Document.class, typeHint.getType())) { + if (ClassUtils.isAssignable(Document.class, typeHint.getType())) { return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint); } @@ -2044,7 +2054,8 @@ public S convert(Object source, TypeInformation return (S) mapConverter.convert(this, BsonUtils.asBson(source), typeHint); } - throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass())); + throw new IllegalArgumentException( + String.format("Expected map like structure but found %s", source.getClass())); } if (source instanceof DBRef) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index c63e7a1115..3abd3a3add 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -733,6 +733,52 @@ void loadNoExistingCollectionReference() { assertThat(result.simplePreinitializedValueRef).isEmpty(); } + @Test // GH-3806 + void resolveReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithRequiredArgsCtor source = new WithRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithRequiredArgsCtor.class); + assertThat(target.publisher).isNotNull(); + } + + @Test // GH-3806 + void resolveLazyReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithLazyRequiredArgsCtor source = new WithLazyRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithLazyRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithLazyRequiredArgsCtor.class); + + // proxy not yet resolved + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + + // resolve the proxy by invoking a method on it + assertThat(target.getPublisher().getName()).isEqualTo("ppp"); + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + assertThat(proxy.isResolved()).isTrue(); + }); + } + @Test // GH-3602 void queryForReference() { @@ -1371,6 +1417,30 @@ static class Publisher { String id; String acronym; String name; + 
+ public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getAcronym() { + return acronym; + } + + public void setAcronym(String acronym) { + this.acronym = acronym; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } } @Data @@ -1401,4 +1471,40 @@ static class OneToManyStylePublisher { @DocumentReference(lookup="{'publisherId':?#{#self._id} }") List books; } + + static class WithRequiredArgsCtor { + + final String id; + + @DocumentReference + final Publisher publisher; + + public WithRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + } + + static class WithLazyRequiredArgsCtor { + + final String id; + + @DocumentReference(lazy = true) + final Publisher publisher; + + public WithLazyRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + + public String getId() { + return id; + } + + public Publisher getPublisher() { + return publisher; + } + } } From 9014f770d8027c4e1bb35fa91b80d16ac4f6e09e Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 8 Sep 2021 13:33:46 +0200 Subject: [PATCH 089/885] Fix slice argument in query fields projection. We now use a Collection instead of an Array to pass on $slice projection values for offset and limit. Closes: #3811 Original pull request: #3812. --- .../data/mongodb/core/query/Field.java | 3 ++- .../data/mongodb/core/MongoTemplateTests.java | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java index 0561bbdca6..02450505b6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.query; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; @@ -192,7 +193,7 @@ public Field slice(String field, int size) { */ public Field slice(String field, int offset, int size) { - slices.put(field, new Integer[] { offset, size }); + slices.put(field, Arrays.asList(offset, size)); return this; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index 28cdaa4830..33ae0ef994 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -3768,6 +3768,23 @@ void shouldFindSubdocumentWithNullCorrectly() { assertThat(loaded).isNotNull(); } + @Test // GH-3811 + public void sliceShouldLimitCollectionValues() { + + DocumentWithCollectionOfSimpleType source = new DocumentWithCollectionOfSimpleType(); + source.id = "id-1"; + source.values = Arrays.asList("spring", "data", "mongodb"); + + template.save(source); + + Criteria criteria = Criteria.where("id").is(source.id); + Query query = Query.query(criteria); + query.fields().slice("values", 0, 1); + DocumentWithCollectionOfSimpleType target = template.findOne(query, DocumentWithCollectionOfSimpleType.class); + + 
assertThat(target.values).containsExactly("spring"); + } + private AtomicReference createAfterSaveReference() { AtomicReference saved = new AtomicReference<>(); From 8fb0e1326b3a33591fca6c7a6ace8fb2088a91ec Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 10:03:45 +0200 Subject: [PATCH 090/885] Introduce `SessionSynchronization.NEVER` to disable transactional participation. SessionSynchronization.NEVER bypasses all transactional integration in cases where applications do not want to make use of transactions so that transaction inspection overhead is avoided. Closes: #3760 Original Pull Request: #3809 --- .../data/mongodb/MongoDatabaseUtils.java | 3 ++- .../mongodb/ReactiveMongoDatabaseUtils.java | 4 ++++ .../data/mongodb/SessionSynchronization.java | 20 +++++++++++++--- .../mongodb/MongoDatabaseUtilsUnitTests.java | 24 +++++++++++++++++++ .../ReactiveMongoDatabaseUtilsUnitTests.java | 14 +++++++++++ 5 files changed, 61 insertions(+), 4 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java index ba8efa536c..c9342ec4f6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -104,7 +104,8 @@ private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDa Assert.notNull(factory, "Factory must not be null!"); - if (!TransactionSynchronizationManager.isSynchronizationActive()) { + if (sessionSynchronization == SessionSynchronization.NEVER + || !TransactionSynchronizationManager.isSynchronizationActive()) { return StringUtils.hasText(dbName) ? 
factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java index 711947a30d..4699ac56c2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -138,6 +138,10 @@ private static Mono doGetMongoDatabase(@Nullable String dbName, R Assert.notNull(factory, "DatabaseFactory must not be null!"); + if (sessionSynchronization == SessionSynchronization.NEVER) { + return getMongoDatabaseOrDefault(dbName, factory); + } + return TransactionSynchronizationManager.forCurrentTransaction() .filter(TransactionSynchronizationManager::isSynchronizationActive) // .flatMap(synchronizationManager -> { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java index 2223b82391..144d3d3cb3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java @@ -15,13 +15,20 @@ */ package org.springframework.data.mongodb; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + /** - * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to - * define in which type of transactions to participate if any. + * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to + * participate if any. * * @author Christoph Strobl * @author Mark Paluch * @since 2.1 + * @see MongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) */ public enum SessionSynchronization { @@ -34,5 +41,12 @@ public enum SessionSynchronization { /** * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}. */ - ON_ACTUAL_TRANSACTION; + ON_ACTUAL_TRANSACTION, + + /** + * Do not participate in ongoing transactions. 
+ * + * @since 3.2.5 + */ + NEVER; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java index 8cb222f0e6..5b0cd81cc2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java @@ -109,6 +109,30 @@ void shouldNotStartSessionWhenNoTransactionOngoing() { verify(dbFactory, never()).withSession(any(ClientSession.class)); } + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() throws Exception { + + when(dbFactory.getMongoDatabase()).thenReturn(db); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.NEVER); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + } + }); + + verify(userTransaction).getStatus(); + verifyNoMoreInteractions(userTransaction); + verifyNoInteractions(session); + } + @Test // DATAMONGO-1920 void shouldParticipateInOngoingJtaTransactionWithCommitWhenSessionSychronizationIsAny() throws Exception { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java index 60a7ff9a47..a7393a1392 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java @@ -88,6 +88,20 @@ void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxMa }).as(StepVerifier::create).expectNext(true).verifyComplete(); } + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() { + + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + + ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.NEVER) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + verify(databaseFactory, never()).getSession(any()); + verify(databaseFactory, never()).withSession(any(ClientSession.class)); + } + @Test // DATAMONGO-2265 void shouldNotStartSessionWhenNoTransactionOngoing() { From a26e78095745ece93ed6711e62f44c1a80ac8a46 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 10:04:58 +0200 Subject: [PATCH 091/885] Reduce allocations in query and update mapping. Introduce EmptyDocument and utility methods in BsonUtils. Avoid entrySet and iterator creation for document iterations/inspections. 
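As an illustration of the intent (the query, source and target variables below are hypothetical caller code): empty sort and field projections are now represented by a shared immutable document, and read-only document inspection avoids materialising entry sets and iterators:

    // BsonUtils.EMPTY_DOCUMENT is immutable; detach it into a copy before mutating
    Document fields = query.getFieldsObject();                    // may now return BsonUtils.EMPTY_DOCUMENT
    Document mutableFields = BsonUtils.asMutableDocument(fields); // returns a mutable Document when needed

    // prefer forEach over entrySet()/iterator() for simple pass-through mapping
    source.forEach((key, value) -> target.put(key, value));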
Relates to: #3760 Original Pull Request: #3809 --- .../data/mongodb/core/MappedDocument.java | 9 ++ .../data/mongodb/core/QueryOperations.java | 2 +- .../core/convert/DocumentAccessor.java | 2 +- .../core/convert/MappingMongoConverter.java | 14 +-- .../mongodb/core/convert/MongoConverter.java | 3 + .../mongodb/core/convert/QueryMapper.java | 53 +++++++---- .../data/mongodb/core/query/Meta.java | 18 +++- .../data/mongodb/core/query/Query.java | 33 +++++-- .../data/mongodb/core/query/TextQuery.java | 13 +-- .../data/mongodb/core/query/Update.java | 30 +++++- .../data/mongodb/util/BsonUtils.java | 63 ++++++++++++ .../data/mongodb/util/EmptyDocument.java | 95 +++++++++++++++++++ .../mongodb/core/MongoTemplateUnitTests.java | 7 +- .../data/mongodb/core/query/QueryTests.java | 16 ++-- .../query/PartTreeMongoQueryUnitTests.java | 7 +- 15 files changed, 298 insertions(+), 67 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java index 340c11bb99..e3c1f3d64c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java @@ -156,5 +156,14 @@ public Boolean isIsolated() { public List getArrayFilters() { return delegate.getArrayFilters(); } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters() + */ + @Override + public boolean hasArrayFilters() { + return delegate.hasArrayFilters(); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java index 1ec8fc9366..e9431aa3d2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java @@ -613,7 +613,7 @@ class UpdateContext extends QueryContext { UpdateContext(MappedDocument update, boolean upsert) { - super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter())))); + super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter()))); this.multi = false; this.upsert = upsert; this.mappedDocument = update; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java index 9c94487a3e..0b31f75341 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java @@ -135,7 +135,7 @@ public Object get(MongoPersistentProperty property) { */ @Nullable public Object getRawId(MongoPersistentEntity entity) { - return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.asMap(document).get("_id"); + return entity.hasIdProperty() ? 
get(entity.getRequiredIdProperty()) : BsonUtils.get(document, "_id"); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 07709df365..302c3dad45 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -25,7 +25,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -1325,21 +1324,22 @@ protected Map readMap(ConversionContext context, Bson bson, Type return map; } - for (Entry entry : sourceMap.entrySet()) { + sourceMap.forEach((k, v) -> { - if (typeMapper.isTypeKey(entry.getKey())) { - continue; + if (typeMapper.isTypeKey(k)) { + return; } - Object key = potentiallyUnescapeMapKey(entry.getKey()); + Object key = potentiallyUnescapeMapKey(k); if (!rawKeyType.isAssignableFrom(key.getClass())) { key = doConvert(key, rawKeyType); } - Object value = entry.getValue(); + Object value = v; map.put(key, value == null ? value : context.convert(value, valueType)); - } + + }); return map; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java index 20499d3173..aff1b8d8e0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java @@ -140,6 +140,9 @@ default Object convertId(@Nullable Object id, Class targetType) { if (ObjectId.isValid(id.toString())) { return new ObjectId(id.toString()); } + + // avoid ConversionException as convertToMongoType will return String anyways. 
+ return id; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index e7deb38231..356dd89faa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -193,12 +193,11 @@ public Document getMappedSort(Document sortObject, @Nullable MongoPersistentEnti Assert.notNull(sortObject, "SortObject must not be null!"); if (sortObject.isEmpty()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } Document mappedSort = mapFieldsToPropertyNames(sortObject, entity); - mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); - return mappedSort; + return mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); } /** @@ -215,42 +214,51 @@ public Document getMappedFields(Document fieldsObject, @Nullable MongoPersistent Assert.notNull(fieldsObject, "FieldsObject must not be null!"); Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity); - mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); - return mappedFields; + return mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); } private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity entity) { if (fields.isEmpty()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } Document target = new Document(); - for (Map.Entry entry : BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).entrySet()) { - Field field = createPropertyField(entity, entry.getKey(), mappingContext); + BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> { + + Field field = createPropertyField(entity, k, mappingContext); if (field.getProperty() != null && field.getProperty().isUnwrapped()) { - continue; + return; } - target.put(field.getMappedKey(), entry.getValue()); - } + target.put(field.getMappedKey(), v); + }); + return target; } - private void mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, MetaMapping metaMapping) { + private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, + MetaMapping metaMapping) { if (entity == null) { - return; + return source; } if (entity.hasTextScoreProperty() && !MetaMapping.IGNORE.equals(metaMapping)) { + + if (source == BsonUtils.EMPTY_DOCUMENT) { + source = new Document(); + } + MongoPersistentProperty textScoreProperty = entity.getTextScoreProperty(); if (MetaMapping.FORCE.equals(metaMapping) || (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsKey(textScoreProperty.getFieldName()))) { source.putAll(getMappedTextScoreField(textScoreProperty)); } } + + return source; } private Document filterUnwrappedObjects(Document fieldsObject, @Nullable MongoPersistentEntity entity) { @@ -679,7 +687,7 @@ protected final Entry createMapEntry(Field field, @Nullable Obje private Entry createMapEntry(String key, @Nullable Object value) { Assert.hasText(key, "Key must not be null or empty!"); - return Collections.singletonMap(key, value).entrySet().iterator().next(); + return new AbstractMap.SimpleEntry<>(key, value); } private Object createReferenceFor(Object source, MongoPersistentProperty property) { @@ -733,13 +741,13 @@ protected boolean isNestedKeyword(@Nullable Object candidate) { return false; } - Set keys = BsonUtils.asMap((Bson) candidate).keySet(); + 
Map map = BsonUtils.asMap((Bson) candidate); - if (keys.size() != 1) { + if (map.size() != 1) { return false; } - return isKeyword(keys.iterator().next()); + return isKeyword(map.entrySet().iterator().next().getKey()); } /** @@ -823,11 +831,14 @@ public Keyword(Bson source, String key) { public Keyword(Bson bson) { - Set keys = BsonUtils.asMap(bson).keySet(); - Assert.isTrue(keys.size() == 1, "Can only use a single value Document!"); + Map map = BsonUtils.asMap(bson); + Assert.isTrue(map.size() == 1, "Can only use a single value Document!"); + + Set> entries = map.entrySet(); + Entry entry = entries.iterator().next(); - this.key = keys.iterator().next(); - this.value = BsonUtils.get(bson, key); + this.key = entry.getKey(); + this.value = entry.getValue(); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java index 2bfddfa2cd..d70a21707f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java @@ -49,8 +49,8 @@ private enum MetaKey { } } - private final Map values = new LinkedHashMap<>(2); - private final Set flags = new LinkedHashSet<>(); + private Map values = Collections.emptyMap(); + private Set flags = Collections.emptySet(); private Integer cursorBatchSize; private Boolean allowDiskUse; @@ -63,8 +63,9 @@ public Meta() {} * @param source */ Meta(Meta source) { - this.values.putAll(source.values); - this.flags.addAll(source.flags); + + this.values = new LinkedHashMap<>(source.values); + this.flags = new LinkedHashSet<>(source.flags); this.cursorBatchSize = source.cursorBatchSize; this.allowDiskUse = source.allowDiskUse; } @@ -158,6 +159,11 @@ public void setCursorBatchSize(int cursorBatchSize) { public boolean addFlag(CursorOption option) { Assert.notNull(option, "CursorOption must not be null!"); + + if (this.flags == Collections.EMPTY_SET) { + this.flags = new LinkedHashSet<>(2); + } + return this.flags.add(option); } @@ -220,6 +226,10 @@ void setValue(String key, @Nullable Object value) { Assert.hasText(key, "Meta key must not be 'null' or blank."); + if (values == Collections.EMPTY_MAP) { + values = new LinkedHashMap<>(2); + } + if (value == null || (value instanceof String && !StringUtils.hasText((String) value))) { this.values.remove(key); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java index 1f54e7049d..ce60798bf5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java @@ -21,6 +21,7 @@ import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; @@ -30,6 +31,7 @@ import java.util.concurrent.TimeUnit; import org.bson.Document; + import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Order; @@ -52,7 +54,7 @@ public class Query { private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES"; - private final Set> restrictedTypes = new HashSet<>(); + private Set> restrictedTypes = Collections.emptySet(); 
private final Map criteria = new LinkedHashMap<>(); private @Nullable Field fieldSpec = null; private Sort sort = Sort.unsorted(); @@ -235,8 +237,15 @@ public Query restrict(Class type, Class... additionalTypes) { Assert.notNull(type, "Type must not be null!"); Assert.notNull(additionalTypes, "AdditionalTypes must not be null"); + if (restrictedTypes == Collections.EMPTY_SET) { + restrictedTypes = new HashSet<>(1 + additionalTypes.length); + } + restrictedTypes.add(type); - restrictedTypes.addAll(Arrays.asList(additionalTypes)); + + if (additionalTypes.length > 0) { + restrictedTypes.addAll(Arrays.asList(additionalTypes)); + } return this; } @@ -246,6 +255,17 @@ public Query restrict(Class type, Class... additionalTypes) { */ public Document getQueryObject() { + if (criteria.isEmpty() && restrictedTypes.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + if (criteria.size() == 1 && restrictedTypes.isEmpty()) { + + for (CriteriaDefinition definition : criteria.values()) { + return definition.getCriteriaObject(); + } + } + Document document = new Document(); for (CriteriaDefinition definition : criteria.values()) { @@ -263,7 +283,7 @@ public Document getQueryObject() { * @return the field {@link Document}. */ public Document getFieldsObject() { - return this.fieldSpec == null ? new Document() : fieldSpec.getFieldsObject(); + return this.fieldSpec == null ? BsonUtils.EMPTY_DOCUMENT : fieldSpec.getFieldsObject(); } /** @@ -272,13 +292,12 @@ public Document getFieldsObject() { public Document getSortObject() { if (this.sort.isUnsorted()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } Document document = new Document(); - this.sort.stream()// - .forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1)); + this.sort.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 
1 : -1)); return document; } @@ -557,7 +576,7 @@ public boolean isSorted() { target.limit = source.getLimit(); target.hint = source.getHint(); target.collation = source.getCollation(); - target.restrictedTypes.addAll(source.getRestrictedTypes()); + target.restrictedTypes = new HashSet<>(source.getRestrictedTypes()); if (source.getMeta().hasValues()) { target.setMeta(new Meta(source.getMeta())); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java index 9a72b3ffc0..84a5b9d47e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java @@ -18,6 +18,8 @@ import java.util.Locale; import org.bson.Document; + +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; /** @@ -157,7 +159,7 @@ public Document getFieldsObject() { return super.getFieldsObject(); } - Document fields = super.getFieldsObject(); + Document fields = BsonUtils.asMutableDocument(super.getFieldsObject()); fields.put(getScoreFieldName(), META_TEXT_SCORE); return fields; @@ -170,15 +172,14 @@ public Document getFieldsObject() { @Override public Document getSortObject() { - Document sort = new Document(); - if (this.sortByScore) { + Document sort = new Document(); sort.put(getScoreFieldName(), META_TEXT_SCORE); + sort.putAll(super.getSortObject()); + return sort; } - sort.putAll(super.getSortObject()); - - return sort; + return super.getSortObject(); } /* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java index 34cab18c31..bdea768d31 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java @@ -56,10 +56,10 @@ public enum Position { } private boolean isolated = false; - private Set keysToUpdate = new HashSet<>(); - private Map modifierOps = new LinkedHashMap<>(); - private Map pushCommandBuilders = new LinkedHashMap<>(1); - private List arrayFilters = new ArrayList<>(); + private final Set keysToUpdate = new HashSet<>(); + private final Map modifierOps = new LinkedHashMap<>(); + private Map pushCommandBuilders = Collections.emptyMap(); + private List arrayFilters = Collections.emptyList(); /** * Static factory method to create an Update using the provided key @@ -193,6 +193,11 @@ public Update push(String key, @Nullable Object value) { public PushOperatorBuilder push(String key) { if (!pushCommandBuilders.containsKey(key)) { + + if (pushCommandBuilders == Collections.EMPTY_MAP) { + pushCommandBuilders = new LinkedHashMap<>(1); + } + pushCommandBuilders.put(key, new PushOperatorBuilder(key)); } return pushCommandBuilders.get(key); @@ -412,6 +417,10 @@ public Update isolated() { */ public Update filterArray(CriteriaDefinition criteria) { + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new ArrayList<>(); + } + this.arrayFilters.add(criteria::getCriteriaObject); return this; } @@ -427,6 +436,10 @@ public Update filterArray(CriteriaDefinition criteria) { */ public Update filterArray(String identifier, Object expression) { + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new 
ArrayList<>(); + } + this.arrayFilters.add(() -> new Document(identifier, expression)); return this; } @@ -455,6 +468,15 @@ public List getArrayFilters() { return Collections.unmodifiableList(this.arrayFilters); } + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters() + */ + @Override + public boolean hasArrayFilters() { + return !this.arrayFilters.isEmpty(); + } + /** * This method is not called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index d452ad662f..c540a14603 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -60,12 +60,26 @@ */ public class BsonUtils { + /** + * The empty document (immutable). This document is serializable. + * + * @since 3.2.5 + */ + public static final Document EMPTY_DOCUMENT = new EmptyDocument(); + @SuppressWarnings("unchecked") @Nullable public static T get(Bson bson, String key) { return (T) asMap(bson).get(key); } + /** + * Return the {@link Bson} object as {@link Map}. Depending on the input type, the return value can be either a casted + * version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + */ public static Map asMap(Bson bson) { if (bson instanceof Document) { @@ -81,6 +95,55 @@ public static Map asMap(Bson bson) { return (Map) bson.toBsonDocument(Document.class, MongoClientSettings.getDefaultCodecRegistry()); } + /** + * Return the {@link Bson} object as {@link Document}. Depending on the input type, the return value can be either a + * casted version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + * @since 3.2.5 + */ + public static Document asDocument(Bson bson) { + + if (bson instanceof Document) { + return (Document) bson; + } + + Map map = asMap(bson); + + if (map instanceof Document) { + return (Document) map; + } + + return new Document(map); + } + + /** + * Return the {@link Bson} object as mutable {@link Document} containing all entries from {@link Bson}. + * + * @param bson + * @return a mutable {@link Document} containing all entries from {@link Bson}. + * @since 3.2.5 + */ + public static Document asMutableDocument(Bson bson) { + + if (bson instanceof EmptyDocument) { + bson = new Document(asDocument(bson)); + } + + if (bson instanceof Document) { + return (Document) bson; + } + + Map map = asMap(bson); + + if (map instanceof Document) { + return (Document) map; + } + + return new Document(map); + } + public static void addToMap(Bson bson, String key, @Nullable Object value) { if (bson instanceof Document) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java new file mode 100644 index 0000000000..83c95c82e5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java @@ -0,0 +1,95 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.bson.Document; +import org.jetbrains.annotations.Nullable; + +/** + * Empty variant of {@link Document}. + * + * @author Mark Paluch + */ +class EmptyDocument extends Document { + + @Override + public Document append(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Object put(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Object remove(Object key) { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map map) { + throw new UnsupportedOperationException(); + } + + @Override + public void replaceAll(BiFunction function) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean remove(Object key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean replace(String key, Object oldValue, Object newValue) { + throw new UnsupportedOperationException(); + } + + @Nullable + @Override + public Object replace(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Set> entrySet() { + return Collections.emptySet(); + } + + @Override + public Collection values() { + return Collections.emptyList(); + } + + @Override + public Set keySet() { + return Collections.emptySet(); + } + + @Override + public void clear() { + throw new UnsupportedOperationException(); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 147d2e49c3..b1d3d6a839 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -101,6 +101,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -1071,7 +1072,7 @@ void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @Test // DATAMONGO-1733, DATAMONGO-2041 @@ -1098,7 +1099,7 @@ void doesNotApplyFieldsWhenTargetIsNotAProjection() { template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new 
Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @Test // DATAMONGO-1733 @@ -1107,7 +1108,7 @@ void doesNotApplyFieldsWhenTargetExtendsDomainType() { template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @Test // DATAMONGO-1348, DATAMONGO-2264 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java index 01dddcd084..69da412073 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java @@ -237,11 +237,8 @@ void clonedQueryShouldNotDependOnCriteriaFromSource() { source.addCriteria(where("From one make ten").is("and two let be.")); Query target = Query.of(source); - compareQueries(target, source); - source.addCriteria(where("Make even three").is("then rich you'll be.")); - - assertThat(target.getQueryObject()).isEqualTo(new Document("From one make ten", "and two let be.")) - .isNotEqualTo(source.getQueryObject()); + assertThat(target.getQueryObject()).containsAllEntriesOf(new Document("From one make ten", "and two let be.")) + .isNotSameAs(source.getQueryObject()); } @Test // DATAMONGO-1783 @@ -353,9 +350,12 @@ void queryOfShouldWorkOnProxiedObjects() { private void compareQueries(Query actual, Query expected) { assertThat(actual.getCollation()).isEqualTo(expected.getCollation()); - assertThat(actual.getSortObject()).isEqualTo(expected.getSortObject()); - assertThat(actual.getFieldsObject()).isEqualTo(expected.getFieldsObject()); - assertThat(actual.getQueryObject()).isEqualTo(expected.getQueryObject()); + assertThat(actual.getSortObject()).hasSameSizeAs(expected.getSortObject()) + .containsAllEntriesOf(expected.getSortObject()); + assertThat(actual.getFieldsObject()).hasSameSizeAs(expected.getFieldsObject()) + .containsAllEntriesOf(expected.getFieldsObject()); + assertThat(actual.getQueryObject()).hasSameSizeAs(expected.getQueryObject()) + .containsAllEntriesOf(expected.getQueryObject()); assertThat(actual.getHint()).isEqualTo(expected.getHint()); assertThat(actual.getLimit()).isEqualTo(expected.getLimit()); assertThat(actual.getSkip()).isEqualTo(expected.getSkip()); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java index c6c1b140cd..9d8400995a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java @@ -31,11 +31,8 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.beans.factory.annotation.Value; -import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import 
org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; @@ -128,7 +125,7 @@ void propagatesRootExceptionForInvalidQuery() { @Test // DATAMONGO-1345, DATAMONGO-1735 void doesNotDeriveFieldSpecForNormalDomainType() { - assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEqualTo(new Document()); + assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEmpty(); } @Test // DATAMONGO-1345 @@ -173,7 +170,7 @@ void doesNotCreateFieldsObjectForOpenProjection() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findAllBy"); - assertThat(query.getFieldsObject()).isEqualTo(new Document()); + assertThat(query.getFieldsObject()).isEmpty(); } @Test // DATAMONGO-1865 From d70e459ffe821c4c3b44783ed162d6461732a05c Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 10 Sep 2021 10:48:22 +0200 Subject: [PATCH 092/885] Upgrade to MongoDB Java Drivers 4.3.2 Closes: #3816 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 4aa47bbf2b..7cb1d10f85 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.3.1 + 4.3.2 ${mongo} 1.19 From 7d6b5ae5fee0ec48dc07f2abce3ab6d342076635 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 10 Sep 2021 15:37:59 +0200 Subject: [PATCH 093/885] Upgrade to Maven Wrapper 3.8.2. See #3818 --- .mvn/wrapper/maven-wrapper.properties | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties index 00d32aab1d..39700a5c4b 100755 --- a/.mvn/wrapper/maven-wrapper.properties +++ b/.mvn/wrapper/maven-wrapper.properties @@ -1 +1,2 @@ -distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip \ No newline at end of file +#Fri Sep 10 15:37:59 CEST 2021 +distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.2/apache-maven-3.8.2-bin.zip From e7150f525ed39ef565c56c45d9c1d965ce728c96 Mon Sep 17 00:00:00 2001 From: divyajnu08 Date: Fri, 10 Sep 2021 10:37:30 +0530 Subject: [PATCH 094/885] Fix update mapping using nested integer keys on map structures. 
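For illustration (assuming an entity whose Map-of-Map property is keyed by strings that happen to look like numbers), the adjusted patterns no longer treat a trailing numeric path segment as a positional (array index) parameter, so the map key survives update mapping unchanged:

    // assumes: class EntityWithNestedMap { Map<String, Map<String, Map<String, String>>> levelOne; }
    Update update = new Update().set("levelOne.0.1.3", "4");
    // expected mapped update: {"$set": {"levelOne.0.1.3": "4"}}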
Closes: #3775 Original Pull Request: #3815 --- .../mongodb/core/convert/QueryMapper.java | 5 ++-- .../core/convert/QueryMapperUnitTests.java | 25 +++++++++++++++++++ 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 356dd89faa..df53f2c21c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -70,6 +70,7 @@ * @author Christoph Strobl * @author Mark Paluch * @author David Julia + * @author Divya Srivastava */ public class QueryMapper { @@ -1032,8 +1033,8 @@ public TypeInformation getTypeHint() { */ protected static class MetadataBackedField extends Field { - private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+"); - private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+"); + private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?"); + private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+(?!$)"); private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!"; private final MongoPersistentEntity entity; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 46db6e7d6a..a54e80fa39 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -61,6 +61,7 @@ import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.TextQuery; +import org.springframework.data.mongodb.core.query.Update; import com.mongodb.BasicDBObject; import com.mongodb.MongoClientSettings; @@ -1354,6 +1355,25 @@ void mapStringIdFieldProjection() { org.bson.Document mappedFields = mapper.getMappedFields(new org.bson.Document("id", 1), context.getPersistentEntity(WithStringId.class)); assertThat(mappedFields).containsEntry("_id", 1); } + + @Test + void mapNestedStringFieldCorrectly() { + Update update = new Update(); + update.set("levelOne.a.b.d", "e"); + org.bson.Document document = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + assertThat(document).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.a.b.d","e"))); + } + + @Test + void mapNestedIntegerFieldCorrectly() { + Update update = new Update(); + update.set("levelOne.0.1.3", "4"); + org.bson.Document document = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + assertThat(document).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.3","4"))); + } + @Test // GH-3783 void retainsId$InWithStringArray() { @@ -1542,6 +1562,11 @@ static class EntityWithIntKeyedMapOfMap{ static class EntityWithComplexValueTypeList { List list; } + + static class 
EntityWithNestedMap { + Map>> levelOne; + } + static class WithExplicitTargetTypes { From eda1c793157b8883441a43c7bcc629926a7e7206 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 13 Sep 2021 14:25:17 +0200 Subject: [PATCH 095/885] Move and add tests to UpdateMapper. Also update author information. Original Pull Request: #3815 --- .../core/convert/QueryMapperUnitTests.java | 24 -------- .../core/convert/UpdateMapperUnitTests.java | 55 +++++++++++++++++++ 2 files changed, 55 insertions(+), 24 deletions(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index a54e80fa39..11ea78fd4d 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1355,25 +1355,6 @@ void mapStringIdFieldProjection() { org.bson.Document mappedFields = mapper.getMappedFields(new org.bson.Document("id", 1), context.getPersistentEntity(WithStringId.class)); assertThat(mappedFields).containsEntry("_id", 1); } - - @Test - void mapNestedStringFieldCorrectly() { - Update update = new Update(); - update.set("levelOne.a.b.d", "e"); - org.bson.Document document = mapper.getMappedObject(update.getUpdateObject(), - context.getPersistentEntity(EntityWithNestedMap.class)); - assertThat(document).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.a.b.d","e"))); - } - - @Test - void mapNestedIntegerFieldCorrectly() { - Update update = new Update(); - update.set("levelOne.0.1.3", "4"); - org.bson.Document document = mapper.getMappedObject(update.getUpdateObject(), - context.getPersistentEntity(EntityWithNestedMap.class)); - assertThat(document).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.3","4"))); - } - @Test // GH-3783 void retainsId$InWithStringArray() { @@ -1562,11 +1543,6 @@ static class EntityWithIntKeyedMapOfMap{ static class EntityWithComplexValueTypeList { List list; } - - static class EntityWithNestedMap { - Map>> levelOne; - } - static class WithExplicitTargetTypes { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java index bba9811e56..44712fa8d1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java @@ -68,6 +68,7 @@ * @author Mark Paluch * @author Pavel Vodrazka * @author David Julia + * @author Divya Srivastava */ @ExtendWith(MockitoExtension.class) class UpdateMapperUnitTests { @@ -1200,6 +1201,56 @@ void mapsObjectClassPropertyFieldInMapValueTypeAsKey() { assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.class\": \"value\"}}"); } + @Test // GH-3775 + void mapNestedStringFieldCorrectly() { + + Update update = new Update().set("levelOne.a.b.d", "e"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.a.b.d","e"))); + } + + @Test // GH-3775 + void mapNestedIntegerFieldCorrectly() { 
+ + Update update = new Update().set("levelOne.0.1.3", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.3","4"))); + } + + @Test // GH-3775 + void mapNestedMixedStringIntegerFieldCorrectly() { + + Update update = new Update().set("levelOne.0.1.c", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.c","4"))); + } + + @Test // GH-3775 + void mapNestedMixedStringIntegerWithStartNumberFieldCorrectly() { + + Update update = new Update().set("levelOne.0a.1b.3c", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0a.1b.3c","4"))); + } + + @Test // GH-3688 + void multipleKeysStartingWithANumberInNestedPath() { + + Update update = new Update().set("intKeyedMap.1a.map.0b", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.1a.map.0b\": \"testing\"}}"); + } + static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes { ListModelWrapper concreteTypeWithListAttributeOfInterfaceType; } @@ -1566,4 +1617,8 @@ static class UnwrappableType { String transientValue; } + static class EntityWithNestedMap { + Map>> levelOne; + } + } From 99203b397a27f9cd595eebb028161b9c054dfd68 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 24 Aug 2021 07:06:17 +0200 Subject: [PATCH 096/885] Add support for deriving json schema for encrypted properties. This commit introduces support for creating a MongoJsonSchema containing encrypted fields for a given type based on mapping metadata. Using the Encrypted annotation allows to derive required encryptMetadata and encrypt properties within a given (mapping)context. @Document @Encrypted(keyId = "...") static class Patient { // ... @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") private Integer ssn; } MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext); MongoJsonSchema patientSchema = schemaCreator .filter(MongoJsonSchemaCreator.encryptedOnly()) .createSchemaFor(Patient.class); Closes: #3800 Original pull request: #3801. 
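For illustration only (shape approximated; the concrete keyId and bsonType depend on the annotated model), rendering the schema derived for the Patient example above yields a $jsonSchema document along these lines:

    org.bson.Document document = patientSchema.toDocument();
    // roughly:
    // { "$jsonSchema": { "type": "object",
    //     "encryptMetadata": { "keyId": [ /* data key id(s) from @Encrypted(keyId = ...) */ ] },
    //     "properties": { "ssn": { "encrypt": {
    //         "bsonType": "int",
    //         "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" } } } } }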
--- .../mongodb/core/EncryptionAlgorithms.java | 29 ++ .../core/MappingMongoJsonSchemaCreator.java | 109 ++++++- .../mongodb/core/MongoJsonSchemaCreator.java | 136 +++++++++ .../mapping/BasicMongoPersistentEntity.java | 38 +++ .../mapping/BasicMongoPersistentProperty.java | 48 ++++ .../data/mongodb/core/mapping/Encrypted.java | 112 ++++++++ .../core/mapping/MongoMappingContext.java | 6 + .../core/mapping/MongoPersistentEntity.java | 9 + .../core/mapping/MongoPersistentProperty.java | 9 + .../UnwrappedMongoPersistentEntity.java | 6 + .../UnwrappedMongoPersistentProperty.java | 6 + .../core/schema/DefaultMongoJsonSchema.java | 36 ++- .../core/schema/DocumentJsonSchema.java | 6 +- .../IdentifiableJsonSchemaProperty.java | 16 +- .../mongodb/core/schema/MongoJsonSchema.java | 32 ++- .../core/schema/TypedJsonSchemaObject.java | 4 + .../util/encryption/EncryptionUtils.java | 67 +++++ .../mongodb/util/spel/ExpressionUtils.java | 52 ++++ ...appingMongoJsonSchemaCreatorUnitTests.java | 272 +++++++++++++++++- .../asciidoc/reference/mongo-json-schema.adoc | 103 +++++++ 20 files changed, 1074 insertions(+), 22 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java new file mode 100644 index 0000000000..0ed7340aa1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java @@ -0,0 +1,29 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Encryption algorithms supported by MongoDB Client Side Field Level Encryption. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public final class EncryptionAlgorithms { + + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"; + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random"; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java index ecbf8a4f07..a53ff8f5a5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -20,13 +20,19 @@ import java.util.Collections; import java.util.EnumSet; import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import org.bson.Document; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.Encrypted; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.JsonSchemaObject; import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; @@ -34,10 +40,12 @@ import org.springframework.data.mongodb.core.schema.MongoJsonSchema; import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain @@ -52,6 +60,7 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { private final MongoConverter converter; private final MappingContext, MongoPersistentProperty> mappingContext; + private final Predicate filter; /** * Create a new instance of {@link MappingMongoJsonSchemaCreator}. 
@@ -61,10 +70,24 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { @SuppressWarnings("unchecked") MappingMongoJsonSchemaCreator(MongoConverter converter) { + this(converter, (MappingContext, MongoPersistentProperty>) converter.getMappingContext(), + (property) -> true); + } + + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter, + MappingContext, MongoPersistentProperty> mappingContext, + Predicate filter) { + Assert.notNull(converter, "Converter must not be null!"); this.converter = converter; - this.mappingContext = (MappingContext, MongoPersistentProperty>) converter - .getMappingContext(); + this.mappingContext = mappingContext; + this.filter = filter; + } + + @Override + public MongoJsonSchemaCreator filter(Predicate filter) { + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter); } /* @@ -77,11 +100,29 @@ public MongoJsonSchema createSchemaFor(Class type) { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder(); + { + Encrypted encrypted = entity.findAnnotation(Encrypted.class); + if (encrypted != null) { + + Document encryptionMetadata = new Document(); + + Collection encryptionKeyIds = entity.getEncryptionKeyIds(); + if (!CollectionUtils.isEmpty(encryptionKeyIds)) { + encryptionMetadata.append("keyId", encryptionKeyIds); + } + + if (StringUtils.hasText(encrypted.algorithm())) { + encryptionMetadata.append("algorithm", encrypted.algorithm()); + } + + schemaBuilder.encryptionMetadata(encryptionMetadata); + } + } + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); return schemaBuilder.build(); - } private List computePropertiesForEntity(List path, @@ -93,6 +134,11 @@ private List computePropertiesForEntity(List currentPath = new ArrayList<>(path); + if (!filter.test(new PropertyContext( + currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")), nested))) { + continue; + } + if (path.contains(nested)) { // cycle guard schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)), Object.class, false)); @@ -120,15 +166,38 @@ private JsonSchemaProperty computeSchemaForProperty(List path, @@ -207,4 +276,30 @@ static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProp return JsonSchemaProperty.required(property); } + + class PropertyContext implements JsonSchemaPropertyContext { + + private String path; + private MongoPersistentProperty property; + + public PropertyContext(String path, MongoPersistentProperty property) { + this.path = path; + this.property = property; + } + + @Override + public String getPath() { + return path; + } + + @Override + public MongoPersistentProperty getProperty() { + return property; + } + + @Override + public MongoPersistentEntity resolveEntity(MongoPersistentProperty property) { + return (MongoPersistentEntity) mappingContext.getPersistentEntity(property); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java index f3c0dcd624..5e5bc50644 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -15,7 +15,23 @@ */ package org.springframework.data.mongodb.core; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; import org.springframework.util.Assert; @@ -46,6 +62,7 @@ * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. *

+ * {@link Encrypted} properties will contain {@literal encrypt} information. * * @author Christoph Strobl * @since 2.2 @@ -60,6 +77,88 @@ public interface MongoJsonSchemaCreator { */ MongoJsonSchema createSchemaFor(Class type); + /** + * Filter matching {@link JsonSchemaProperty properties}. + * + * @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + MongoJsonSchemaCreator filter(Predicate filter); + + /** + * The context in which a specific {@link #getProperty()} is encountered during schema creation. + * + * @since 3.3 + */ + interface JsonSchemaPropertyContext { + + /** + * The path to a given field/property in dot notation. + * + * @return never {@literal null}. + */ + String getPath(); + + /** + * The current property. + * + * @return never {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Obtain the {@link MongoPersistentEntity} for a given property. + * + * @param property must not be {@literal null}. + * @param + * @return {@literal null} if the property is not an entity. It is nevertheless recommend to check + * {@link PersistentProperty#isEntity()} first. + */ + @Nullable + MongoPersistentEntity resolveEntity(MongoPersistentProperty property); + + } + + /** + * A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones. + * + * @return new instance of {@link Predicate}. + * @since 3.3 + */ + static Predicate encryptedOnly() { + + return new Predicate() { + + // cycle guard + private final Set seen = new HashSet<>(); + + @Override + public boolean test(JsonSchemaPropertyContext context) { + return extracted(context.getProperty(), context); + } + + private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) { + if (property.isAnnotationPresent(Encrypted.class)) { + return true; + } + + if (!property.isEntity() || seen.contains(property)) { + return false; + } + + seen.add(property); + + for (MongoPersistentProperty nested : context.resolveEntity(property)) { + if (extracted(nested, context)) { + return true; + } + } + return false; + } + }; + } + /** * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given * {@link MongoConverter}. @@ -72,4 +171,41 @@ static MongoJsonSchemaCreator create(MongoConverter mongoConverter) { Assert.notNull(mongoConverter, "MongoConverter must not be null!"); return new MappingMongoJsonSchemaCreator(mongoConverter); } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential + * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}. + * + * @param mappingContext must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create(MappingContext mappingContext) { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We + * recommend to use {@link #create(MappingContext)}. + * + * @return new instance of {@link MongoJsonSchemaCreator}. 
+ * @since 3.3 + */ + static MongoJsonSchemaCreator create() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java index 7bf8214aeb..6840fce5bf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java @@ -17,8 +17,12 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.springframework.data.annotation.Id; @@ -28,6 +32,9 @@ import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.model.BasicPersistentEntity; import org.springframework.data.mongodb.MongoCollectionUtils; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; import org.springframework.data.util.TypeInformation; import org.springframework.expression.EvaluationContext; import org.springframework.expression.Expression; @@ -212,6 +219,11 @@ public EvaluationContext getEvaluationContext(Object rootObject) { return super.getEvaluationContext(rootObject); } + @Override + public EvaluationContext getEvaluationContext(Object rootObject, ExpressionDependencies dependencies) { + return super.getEvaluationContext(rootObject, dependencies); + } + private void verifyFieldUniqueness() { AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler(); @@ -360,6 +372,32 @@ private void assertUniqueness(MongoPersistentProperty property) { } } + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getType().getSimpleName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } + /** * @author Christoph Strobl * @since 1.6 diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 1315757896..cf74d696a8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -16,7 +16,11 @@ package org.springframework.data.mongodb.core.mapping; import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; import org.bson.types.ObjectId; @@ -29,7 +33,12 @@ import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.support.StandardEvaluationContext; import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -299,4 +308,43 @@ public boolean isTextScoreProperty() { return isAnnotationPresent(TextScore.class); } + /** + * Obtain the {@link EvaluationContext} for a specific root object. + * + * @param rootObject can be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + public EvaluationContext getEvaluationContext(@Nullable Object rootObject) { + + if (getOwner() instanceof BasicMongoPersistentEntity) { + return ((BasicMongoPersistentEntity) getOwner()).getEvaluationContext(rootObject); + } + return rootObject != null ? new StandardEvaluationContext(rootObject) : new StandardEvaluationContext(); + } + + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getOwner().getType().getSimpleName() + "." + getName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java new file mode 100644 index 0000000000..8bd0f99c41 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * {@link Encrypted} provides data required for MongoDB Client Side Field Level Encryption that is applied during schema + * resolution. It can be applied on top level (typically those types annotated with {@link Document} to provide the + * {@literal encryptMetadata}. + * + *
+ * @Document
+ * @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==")
+ * public class Patient {
+ * 	 private ObjectId id;
+ * 	 private String name;
+ *
+ * 	 @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+ * 	 private Integer ssn;
+ * }
+ *
+ * "encryptMetadata": {
+ *    "keyId": [
+ *      {
+ *        "$binary": {
+ *          "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
+ *          "subType": "04"
+ *        }
+ *      }
+ *    ]
+ *  }
+ * 
+ * + *
+ * On property level it is used for deriving field specific {@literal encrypt} settings. + * + *
+ * public class Patient {
+ * 	 private ObjectId id;
+ * 	 private String name;
+ *
+ * 	 @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
+ * 	 private String ssn;
+ * }
+ *
+ * "ssn" : {
+ *   "encrypt": {
+ *      "keyId": [
+ *        {
+ *          "$binary": {
+ *            "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
+ *            "subType": "04"
+ *          }
+ *        }
+ *      ],
+ *      "algorithm" : "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic",
+ *      "bsonType" : "string"
+ *    }
+ *  }
+ * 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.FIELD }) +public @interface Encrypted { + + /** + * Get the {@code keyId} to use. The value must resolve to either the UUID representation of the key or a base64 + * encoded value representing the UUID value. + *

+ * On {@link ElementType#TYPE} level the {@link #keyId()} can be left empty if explicitly set for fields.
+ * On {@link ElementType#FIELD} level the {@link #keyId()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the key id to use. May contain a parsable {@link org.springframework.expression.Expression expression}. In + * this case the {@code #target} variable will hold the target element name. + */ + String[] keyId() default {}; + + /** + * Set the algorithm to use. + *

+ * On {@link ElementType#TYPE} level the {@link #algorithm()} can be left empty if explicitly set for fields.
+ * On {@link ElementType#FIELD} level the {@link #algorithm()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the encryption algorithm. + * @see org.springframework.data.mongodb.core.EncryptionAlgorithms + */ + String algorithm() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java index 121658b065..674ea74f3a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java @@ -46,6 +46,9 @@ public class MongoMappingContext extends AbstractMappingContext BasicMongoPersistentEntity createPersistentEntity(TypeInformati */ @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + + this.applicationContext = applicationContext; super.setApplicationContext(applicationContext); } @@ -145,4 +150,5 @@ public MongoPersistentEntity getPersistentEntity(MongoPersistentProperty pers return new UnwrappedMongoPersistentEntity<>(entity, new UnwrapEntityContext(persistentProperty)); } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java index deb69eab36..d9b5ae0bd4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Collection; + import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.model.MutablePersistentEntity; import org.springframework.lang.Nullable; @@ -102,4 +104,11 @@ default boolean isUnwrapped() { return false; } + /** + * @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified. + * {@literal null} no {@link Encrypted} annotation found. + * @since 3.3 + */ + @Nullable + Collection getEncryptionKeyIds(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java index 2bd387d74c..8dc89e03f9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Collection; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.mapping.PersistentEntity; @@ -160,6 +162,13 @@ default boolean isUnwrapped() { return isEntity() && isAnnotationPresent(Unwrapped.class); } + /** + * @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified. + * {@literal null} no {@link Encrypted} annotation found. 
+ * @since 3.3 + */ + Collection getEncryptionKeyIds(); + /** * Simple {@link Converter} implementation to transform a {@link MongoPersistentProperty} into its field name. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java index 6a60168e91..f85c73cae0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java @@ -17,6 +17,7 @@ import java.lang.annotation.Annotation; import java.util.ArrayList; +import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Spliterator; @@ -323,4 +324,9 @@ public void setEvaluationContextProvider(EvaluationContextProvider provider) { public boolean isUnwrapped() { return context.getProperty().isUnwrapped(); } + + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index a2194c173f..24e4ae057f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -18,6 +18,7 @@ import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.util.Collection; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.PersistentEntity; @@ -268,6 +269,11 @@ public boolean isUnwrapped() { return delegate.isUnwrapped(); } + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } + @Override @Nullable public Class getComponentType() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java index 1b05840913..f77e4290ec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java @@ -16,7 +16,9 @@ package org.springframework.data.mongodb.core.schema; import org.bson.Document; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; /** * Value object representing a MongoDB-specific JSON schema which is the default {@link MongoJsonSchema} implementation. @@ -29,18 +31,44 @@ class DefaultMongoJsonSchema implements MongoJsonSchema { private final JsonSchemaObject root; + @Nullable // + private final Document encryptionMetadata; + DefaultMongoJsonSchema(JsonSchemaObject root) { + this(root, null); + } + + /** + * Create new instance of {@link DefaultMongoJsonSchema}. + * + * @param root the schema root element. + * @param encryptionMetadata can be {@literal null}. 
+ * @since 3.3 + */ + DefaultMongoJsonSchema(JsonSchemaObject root, @Nullable Document encryptionMetadata) { + + Assert.notNull(root, "Root schema object must not be null!"); - Assert.notNull(root, "Root must not be null!"); this.root = root; + this.encryptionMetadata = encryptionMetadata; } /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#toDocument() + * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#schema() */ @Override - public Document toDocument() { - return new Document("$jsonSchema", root.toDocument()); + public Document schemaDocument() { + + Document schemaDocument = new Document(); + + // we want this to be the first element rendered, so it reads nice when printed to json + if (!CollectionUtils.isEmpty(encryptionMetadata)) { + schemaDocument.append("encryptMetadata", encryptionMetadata); + } + + schemaDocument.putAll(root.toDocument()); + + return schemaDocument; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java index 2788dd59e5..787e94903a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java @@ -36,10 +36,10 @@ class DocumentJsonSchema implements MongoJsonSchema { /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#toDocument() + * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#schema() */ @Override - public Document toDocument() { - return new Document("$jsonSchema", new Document(document)); + public Document schemaDocument() { + return new Document(document); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java index 28116e1bac..97b3cc6b46 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java @@ -523,6 +523,10 @@ public ObjectJsonSchemaProperty description(String description) { public ObjectJsonSchemaProperty generatedDescription() { return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); } + + public List getProperties() { + return jsonSchemaObjectDelegate.getProperties(); + } } /** @@ -1060,7 +1064,7 @@ public static class EncryptedJsonSchemaProperty implements JsonSchemaProperty { private final JsonSchemaProperty targetProperty; private final @Nullable String algorithm; private final @Nullable String keyId; - private final @Nullable List keyIds; + private final @Nullable List keyIds; /** * Create new instance of {@link EncryptedJsonSchemaProperty} wrapping the given {@link JsonSchemaProperty target}. 
@@ -1072,7 +1076,7 @@ public EncryptedJsonSchemaProperty(JsonSchemaProperty target) { } private EncryptedJsonSchemaProperty(JsonSchemaProperty target, @Nullable String algorithm, @Nullable String keyId, - @Nullable List keyIds) { + @Nullable List keyIds) { Assert.notNull(target, "Target must not be null!"); this.targetProperty = target; @@ -1134,6 +1138,14 @@ public EncryptedJsonSchemaProperty keys(UUID... keyId) { return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); } + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keys(Object... keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java index d45dcd09ec..a14cde2d3a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java @@ -20,6 +20,7 @@ import org.bson.Document; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.lang.Nullable; /** * Interface defining MongoDB-specific JSON schema object. New objects can be built with {@link #builder()}, for @@ -62,13 +63,25 @@ public interface MongoJsonSchema { /** - * Create the {@link Document} containing the specified {@code $jsonSchema}.
+ * Create the {@code $jsonSchema} {@link Document} containing the specified {@link #schemaDocument()}.
* Property and field names need to be mapped to the domain type ones by running the {@link Document} through a * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. * * @return never {@literal null}. */ - Document toDocument(); + default Document toDocument() { + return new Document("$jsonSchema", schemaDocument()); + } + + /** + * Create the {@link Document} defining the schema.
+ * Property and field names need to be mapped to the domain type ones by running the {@link Document} through a + * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. + * + * @return never {@literal null}. + * @since 3.3 + */ + Document schemaDocument(); /** * Create a new {@link MongoJsonSchema} for a given root object. @@ -108,6 +121,9 @@ class MongoJsonSchemaBuilder { private ObjectJsonSchemaObject root; + @Nullable // + private Document encryptionMetadata; + MongoJsonSchemaBuilder() { root = new ObjectJsonSchemaObject(); } @@ -266,13 +282,23 @@ public MongoJsonSchemaBuilder description(String description) { return this; } + /** + * Define the {@literal encryptMetadata} element of the schema. + * + * @param encryptionMetadata can be {@literal null}. + * @since 3.3 + */ + public void encryptionMetadata(@Nullable Document encryptionMetadata) { + this.encryptionMetadata = encryptionMetadata; + } + /** * Obtain the {@link MongoJsonSchema}. * * @return new instance of {@link MongoJsonSchema}. */ public MongoJsonSchema build() { - return MongoJsonSchema.of(root); + return new DefaultMongoJsonSchema(root, encryptionMetadata); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java index 2486e98e08..59a367a9d7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java @@ -437,6 +437,10 @@ public ObjectJsonSchemaObject generatedDescription() { return newInstance(description, true, restrictions); } + public List getProperties() { + return properties; + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java new file mode 100644 index 0000000000..809f83fdc9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java @@ -0,0 +1,67 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.encryption; + +import java.util.UUID; +import java.util.function.Supplier; + +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Internal utility class for dealing with encryption related matters. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public final class EncryptionUtils { + + /** + * Resolve a given plain {@link String} value into the store native {@literal keyId} format, considering potential + * {@link Expression expressions}.
+ * The potential keyId is probed against an {@link UUID#fromString(String) UUID value} and the {@literal base64} + * encoded {@code $binary} representation. + * + * @param value the source value to resolve the keyId for. Must not be {@literal null}. + * @param evaluationContext a {@link Supplier} used to provide the {@link EvaluationContext} in case an + * {@link Expression} is {@link ExpressionUtils#detectExpression(String) detected}. + * @return can be {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. + */ + @Nullable + public static Object resolveKeyId(String value, Supplier evaluationContext) { + + Assert.notNull(value, "Value must not be null!"); + + Object potentialKeyId = value; + Expression expression = ExpressionUtils.detectExpression(value); + if (expression != null) { + potentialKeyId = expression.getValue(evaluationContext.get()); + if (!(potentialKeyId instanceof String)) { + return potentialKeyId; + } + } + try { + return UUID.fromString(potentialKeyId.toString()); + } catch (IllegalArgumentException e) { + return org.bson.Document.parse("{ val : { $binary : { base64 : '" + potentialKeyId + "', subType : '04'} } }") + .get("val"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java new file mode 100644 index 0000000000..b41961e6ea --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java @@ -0,0 +1,52 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.spel; + +import org.springframework.expression.Expression; +import org.springframework.expression.ParserContext; +import org.springframework.expression.common.LiteralExpression; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Internal utility class for dealing with {@link Expression} and potential ones. + * + * @author Christoph Strobl + * @since 3.3 + */ +public final class ExpressionUtils { + + private static final SpelExpressionParser PARSER = new SpelExpressionParser(); + + /** + * Returns a SpEL {@link Expression} if the given {@link String} is actually an expression that does not evaluate to a + * {@link LiteralExpression} (indicating that no subsequent evaluation is necessary). + * + * @param potentialExpression can be {@literal null} + * @return can be {@literal null}. + */ + @Nullable + public static Expression detectExpression(@Nullable String potentialExpression) { + + if (!StringUtils.hasText(potentialExpression)) { + return null; + } + + Expression expression = PARSER.parseExpression(potentialExpression, ParserContext.TEMPLATE_EXPRESSION); + return expression instanceof LiteralExpression ? 
null : expression; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java index 9c52bbe628..9fd19189ce 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java @@ -19,23 +19,27 @@ import java.util.Collections; import java.util.Date; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.bson.Document; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; - +import org.springframework.context.support.GenericApplicationContext; import org.springframework.data.annotation.Transient; import org.springframework.data.convert.WritingConverter; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.FieldType; import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.spel.spi.Function; /** * Unit tests for {@link MappingMongoJsonSchemaCreator}. 
@@ -95,6 +99,64 @@ public void converterRegistered() { "{ 'type' : 'object', 'properties' : { '_id' : { 'type' : 'object' }, 'nested' : { 'type' : 'object' } } }"); } + @Test // GH-3800 + public void csfle/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Patient.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema).isEqualTo(Document.parse(PATIENT)); + } + + @Test // GH-3800 + public void csfleCyclic/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Cyclic.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema).isNotNull(); + } + + @Test // GH-3800 + public void csfleWithKeyFromProperties() { + + GenericApplicationContext applicationContext = new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromProperty.class); + + assertThat(schema.schemaDocument()).isEqualTo(Document.parse(ENC_FROM_PROPERTY_SCHEMA)); + } + + @Test // GH-3800 + public void csfleWithKeyFromMethod() { + + GenericApplicationContext applicationContext = new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromMethod.class); + + assertThat(schema.schemaDocument()).isEqualTo(Document.parse(ENC_FROM_METHOD_SCHEMA)); + } + // --> TYPES AND JSON // --> ENUM @@ -125,8 +187,7 @@ enum JustSomeEnum { " 'collectionProperty' : { 'type' : 'array' }," + // " 'mapProperty' : { 'type' : 'object' }," + // " 'objectProperty' : { 'type' : 'object' }," + // - " 'enumProperty' : " + JUST_SOME_ENUM + // - " }" + // + " 'enumProperty' : " + JUST_SOME_ENUM + " }" + // "}"; static class VariousFieldTypes { @@ -249,4 +310,209 @@ public org.bson.Document convert(VariousFieldTypes source) { } } + static final String PATIENT = "{" + // + " 'type': 'object'," + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': 'xKVup8B1Q+CkHaVRx+qa+g=='," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'properties': {" + // + " 'ssn': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }," + // + " 'bloodType': {" + // + " 'encrypt': {" + // + " 'bsonType': 'string'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'medicalRecords': {" + // + " 'encrypt': {" + // + " 'bsonType': 
'array'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'insurance': {" + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==") + static class Patient { + String name; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer ssn; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + String bloodType; + + String keyAltNameField; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + List> medicalRecords; + + Insurance insurance; + } + + static class Insurance { + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_PROPERTY_ENTITY_KEY = "C5a5aMB7Ttq4wSJTFeRn8g=="; + static final String ENC_FROM_PROPERTY_PROPOERTY_KEY = "Mw6mdTVPQfm4quqSCLVB3g="; + static final String ENC_FROM_PROPERTY_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_PROPOERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{entityKey}") + static class EncryptionMetadataFromProperty { + + @Encrypted(keyId = "#{propertyKey}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_METHOD_ENTITY_KEY = "4fPYFM9qSgyRAjgQ2u+IMQ=="; + static final String ENC_FROM_METHOD_PROPOERTY_KEY = "+idiseKwTVCJfSKC3iUeYQ=="; + static final String ENC_FROM_METHOD_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_METHOD_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_METHOD_PROPOERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}") + static class EncryptionMetadataFromMethod { + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + public static class EncryptionExtension implements EvaluationContextExtension { + + /* + * (non-Javadoc) + * @see org.springframework.data.spel.spi.EvaluationContextExtension#getExtensionId() + */ + @Override + public String getExtensionId() { + return "mongocrypt"; + } + + /* + * 
(non-Javadoc) + * @see org.springframework.data.spel.spi.EvaluationContextExtension#getProperties() + */ + @Override + public Map getProperties() { + + Map properties = new LinkedHashMap<>(); + properties.put("entityKey", ENC_FROM_PROPERTY_ENTITY_KEY); + properties.put("propertyKey", ENC_FROM_PROPERTY_PROPOERTY_KEY); + return properties; + } + + @Override + public Map getFunctions() { + try { + return Collections.singletonMap("keyId", + new Function(EncryptionExtension.class.getMethod("keyId", String.class), this)); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + return Collections.emptyMap(); + } + + public String keyId(String target) { + + if (target.equals("EncryptionMetadataFromMethod")) { + return ENC_FROM_METHOD_ENTITY_KEY; + } + + if (target.equals("EncryptionMetadataFromMethod.policyNumber")) { + return ENC_FROM_METHOD_PROPOERTY_KEY; + } + + return "xKVup8B1Q+CkHaVRx+qa+g=="; + } + } } diff --git a/src/main/asciidoc/reference/mongo-json-schema.adoc b/src/main/asciidoc/reference/mongo-json-schema.adoc index 5a426061a2..36c85f6fb5 100644 --- a/src/main/asciidoc/reference/mongo-json-schema.adoc +++ b/src/main/asciidoc/reference/mongo-json-schema.adoc @@ -225,6 +225,109 @@ MongoJsonSchema schema = MongoJsonSchema.builder() ---- ==== +Instead of defining encrypted fields manually it is possible leverage the `@Encrypted` annotation as shown in the snippet below. + +.Client-Side Field Level Encryption via Json Schema +==== +[source,java] +---- +@Document +@Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") <1> +static class Patient { + + @Id String id; + String name; + + @Encrypted <2> + String bloodType; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") <3> + Integer ssn; +} +---- +<1> Default encryption settings that will be set for `encryptMetadata`. +<2> Encrypted field using default encryption settings. +<3> Encrypted field overriding the default encryption algorithm. +==== + +[TIP] +==== +The `@Encrypted` Annoation supports resolving keyIds via SpEL Expressions. +To do so additional environment metadata (via the `MappingContext`) is required and must be provided. + +[source,java] +---- +@Document +@Encrypted(keyId = "#{mongocrypt.keyId(#target)}") +static class Patient { + + @Id String id; + String name; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") + String bloodType; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + Integer ssn; +} + +MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext); +MongoJsonSchema patientSchema = schemaCreator + .filter(MongoJsonSchemaCreator.encryptedOnly()) + .createSchemaFor(Patient.class); +---- + +The `mongocrypt.keyId` function is defined via an `EvaluationContextExtension` as shown in the snippet below. +Providing a custom extension provides the most flexible way of computing keyIds. + +[source,java] +---- +public class EncryptionExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "mongocrypt"; + } + + @Override + public Map getFunctions() { + return Collections.singletonMap("keyId", new Function(getMethod("computeKeyId", String.class), this)); + } + + public String computeKeyId(String target) { + // ... lookup via target element name + } +} +---- + +To combine derived encryption settings with `AutoEncryptionSettings` in a Spring Boot application use the `MongoClientSettingsBuilderCustomizer`. 
+ +[source,java] +---- +@Bean +MongoClientSettingsBuilderCustomizer customizer(MappingContext mappingContext) { + return (builder) -> { + + // ... keyVaultCollection, kmsProvider, ... + + MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext); + MongoJsonSchema patientSchema = schemaCreator + .filter(MongoJsonSchemaCreator.encryptedOnly()) + .createSchemaFor(Patient.class); + + AutoEncryptionSettings autoEncryptionSettings = AutoEncryptionSettings.builder() + .keyVaultNamespace(keyVaultCollection) + .kmsProviders(kmsProviders) + .extraOptions(extraOpts) + .schemaMap(Collections.singletonMap("db.patient", patientSchema.schemaDocument().toBsonDocument())) + .build(); + + builder.autoEncryptionSettings(autoEncryptionSettings); + }; +} +---- +==== + NOTE: Make sure to set the drivers `com.mongodb.AutoEncryptionSettings` to use client-side encryption. MongoDB does not support encryption for all field types. Specific data types require deterministic encryption to preserve equality comparison functionality. [[mongo.jsonSchema.types]] From 9b02897db54cbd3b9edca34d4f48c2846d903499 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 13 Sep 2021 11:12:12 +0200 Subject: [PATCH 097/885] Add configuration support for MongoDB ServerApiVersion. Introduce FactoryBean and required options to set the ServerAPI. Update namespace xsd and parsing. Closes: #3820 Original pull request: #3821. --- .../mongodb/config/MongoParsingUtils.java | 17 + .../core/MongoClientSettingsFactoryBean.java | 17 +- .../core/MongoServerApiFactoryBean.java | 92 ++ .../main/resources/META-INF/spring.schemas | 6 +- .../data/mongodb/config/spring-mongo-3.3.xsd | 895 ++++++++++++++++++ .../config/MongoClientNamespaceTests.java | 13 + .../core/MongoServerApiFactoryBeanTests.java | 73 ++ .../MongoClientNamespaceTests-context.xml | 5 + 8 files changed, 1114 insertions(+), 4 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java create mode 100644 spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java index cd4d16d91b..935be95500 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java @@ -22,9 +22,12 @@ import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.CustomEditorConfigurer; import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionValidationException; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.xml.BeanDefinitionParser; import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean; +import org.springframework.data.mongodb.core.MongoServerApiFactoryBean; +import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; @@ -112,6 +115,20 @@ public static boolean parseMongoClientSettings(Element element, BeanDefinitionBu // Field level 
encryption setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings"); + // ServerAPI + if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) { + + MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean(); + serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version")); + try { + clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject()); + } catch (Exception exception) { + throw new BeanDefinitionValidationException("Non parsable server-api.", exception); + } + } else { + setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi"); + } + // and the rest mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java index 162035a45d..818dd45f3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java @@ -36,6 +36,7 @@ import com.mongodb.ReadConcern; import com.mongodb.ReadPreference; import com.mongodb.ServerAddress; +import com.mongodb.ServerApi; import com.mongodb.WriteConcern; import com.mongodb.connection.ClusterConnectionMode; import com.mongodb.connection.ClusterType; @@ -113,6 +114,7 @@ public class MongoClientSettingsFactoryBean extends AbstractFactoryBean getObjectType() { return MongoClientSettings.class; @@ -476,9 +487,11 @@ protected MongoClientSettings createInstance() { if (retryWrites != null) { builder = builder.retryWrites(retryWrites); } - if (uUidRepresentation != null) { - builder.uuidRepresentation(uUidRepresentation); + builder = builder.uuidRepresentation(uUidRepresentation); + } + if (serverApi != null) { + builder = builder.serverApi(serverApi); } return builder.build(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java new file mode 100644 index 0000000000..e2a2fecaec --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApi.Builder; +import com.mongodb.ServerApiVersion; + +/** + * {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoServerApiFactoryBean implements FactoryBean { + + private String version; + private @Nullable Boolean deprecationErrors; + private @Nullable Boolean strict; + + /** + * @param version the version string either as the enum name or the server version value. + * @see ServerApiVersion + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * @param deprecationErrors + * @see ServerApi.Builder#deprecationErrors(boolean) + */ + public void setDeprecationErrors(@Nullable Boolean deprecationErrors) { + this.deprecationErrors = deprecationErrors; + } + + /** + * @param strict + * @see ServerApi.Builder#strict(boolean) + */ + public void setStrict(@Nullable Boolean strict) { + this.strict = strict; + } + + @Nullable + @Override + public ServerApi getObject() throws Exception { + + Builder builder = ServerApi.builder().version(version()); + + if (deprecationErrors != null) { + builder = builder.deprecationErrors(deprecationErrors); + } + if (strict != null) { + builder = builder.strict(strict); + } + return builder.build(); + } + + @Nullable + @Override + public Class getObjectType() { + return ServerApi.class; + } + + private ServerApiVersion version() { + try { + // lookup by name eg. 'V1' + return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version); + } catch (IllegalArgumentException e) { + // or just the version number, eg. 
just '1' + return ServerApiVersion.findByValue(version); + } + } +} diff --git a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas index 1ebb3098c7..c7f3f0ab7b 100644 --- a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas +++ b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas @@ -11,7 +11,8 @@ http\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/sp http\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd -http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd=org/springframework/data/mongodb/config/spring-mongo-1.0.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-1.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd @@ -25,4 +26,5 @@ https\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/s https\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.2.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd -https\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd new file mode 100644 index 0000000000..80811306f1 --- /dev/null +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd @@ -0,0 +1,895 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + The reference to a MongoTemplate. Will default to 'mongoTemplate'. + + + + + + + Enables creation of indexes for queries that get derived from the method name + and thus reference domain class properties. Defaults to false. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The reference to a MongoTypeMapper to be used by this MappingMongoConverter. + + + + + + + The reference to a MappingContext. Will default to 'mappingContext'. + + + + + + + Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false. + + + + + + + + + + Enables abbreviating the field names for domain class properties to the + first character of their camel case names, e.g. fooBar -> fb. Defaults to false. + + + + + + + + + + The reference to a FieldNamingStrategy. + + + + + + + Enable/Disable index creation for annotated properties/entities. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A reference to a custom converter. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. 
+ + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java index 47dd85e07a..abdd00c2b5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java @@ -21,6 +21,7 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; +import com.mongodb.ServerApiVersion; import org.bson.UuidRepresentation; import org.junit.Test; import org.junit.runner.RunWith; @@ -147,4 +148,16 @@ public void clientWithUUidSettings() { MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); assertThat(settings.getUuidRepresentation()).isEqualTo(UuidRepresentation.STANDARD); } + + @Test // DATAMONGO-2427 + public void clientWithServerVersion() { + + assertThat(ctx.containsBean("client-with-server-api-settings")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-server-api-settings", MongoClientFactoryBean.class); + + MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); + assertThat(settings.getServerApi()).isNotNull().satisfies(it -> { + assertThat(it.getVersion()).isEqualTo(ServerApiVersion.V1); + }); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java new file mode 100644 index 0000000000..0c79478fee --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApiVersion; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.AutoEncryptionSettings; + +/** + * Integration tests for {@link MongoServerApiFactoryBean}. 
+ * + * @author Christoph Strobl + */ +public class MongoServerApiFactoryBeanTests { + + @Test // DATAMONGO-2306 + public void createsServerApiForVersionString() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "V1"); + definition.getPropertyValues().addPropertyValue("deprecationErrors", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "deprecationErrors")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).contains(true); + assertThat(target.getStrict()).isNotPresent(); + } + + @Test // DATAMONGO-2306 + public void createsServerApiForVersionNumber() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "1"); + definition.getPropertyValues().addPropertyValue("strict", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "strict")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).isNotPresent(); + assertThat(target.getStrict()).contains(true); + } +} diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml index 1bd3aa2a05..79e5ac40a0 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml @@ -41,4 +41,9 @@ + + + + + From 0af8d6839e965b04717aa17f483a70af13c23a52 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 14 Sep 2021 09:11:32 +0200 Subject: [PATCH 098/885] Polishing. Reformat code, fix ticket references in tests. See #3820 Original pull request: #3821. 
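
For illustration, a minimal sketch of how the server API support introduced and polished in the two patches above can be configured programmatically; it mirrors what `MongoServerApiFactoryBean` produces and what `MongoClientSettingsFactoryBean` applies, and the version and flags chosen here are examples only, roughly the programmatic counterpart of the new `server-api-version` XML attribute.

[source,java]
----
import com.mongodb.MongoClientSettings;
import com.mongodb.ServerApi;
import com.mongodb.ServerApiVersion;

class ServerApiSettingsSketch {

	static MongoClientSettings versionedApiSettings() {

		// Pin the client to the versioned server API; this is the ServerApi instance
		// the factory bean builds and the settings factory bean passes to the driver.
		ServerApi serverApi = ServerApi.builder()
				.version(ServerApiVersion.V1) // or ServerApiVersion.findByValue("1")
				.strict(true)
				.deprecationErrors(false)
				.build();

		return MongoClientSettings.builder()
				.serverApi(serverApi)
				.build();
	}
}
----

As the `version()` lookup in the factory bean shows, the version may be given either as the enum name (`V1`) or as the plain value (`1`).
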
--- .../mongodb/core/MongoServerApiFactoryBean.java | 2 +- .../config/MongoClientNamespaceTests.java | 5 +++-- .../core/MongoServerApiFactoryBeanTests.java | 16 ++++++++-------- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java index e2a2fecaec..c93016b097 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -35,7 +35,7 @@ public class MongoServerApiFactoryBean implements FactoryBean { private @Nullable Boolean deprecationErrors; private @Nullable Boolean strict; - /** + /** * @param version the version string either as the enum name or the server version value. * @see ServerApiVersion */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java index abdd00c2b5..127e3d1022 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java @@ -21,10 +21,10 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; -import com.mongodb.ServerApiVersion; import org.bson.UuidRepresentation; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.data.mongodb.core.MongoClientFactoryBean; @@ -35,6 +35,7 @@ import com.mongodb.MongoClientSettings; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; +import com.mongodb.ServerApiVersion; import com.mongodb.connection.ClusterType; /** @@ -149,7 +150,7 @@ public void clientWithUUidSettings() { assertThat(settings.getUuidRepresentation()).isEqualTo(UuidRepresentation.STANDARD); } - @Test // DATAMONGO-2427 + @Test // GH-3820 public void clientWithServerVersion() { assertThat(ctx.containsBean("client-with-server-api-settings")).isTrue(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java index 0c79478fee..d584b6cfb3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java @@ -17,24 +17,24 @@ import static org.assertj.core.api.Assertions.*; -import com.mongodb.ServerApi; -import com.mongodb.ServerApiVersion; import org.junit.jupiter.api.Test; + import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.AutoEncryptionSettings; +import com.mongodb.ServerApi; +import com.mongodb.ServerApiVersion; /** * Integration tests for {@link MongoServerApiFactoryBean}. 
* * @author Christoph Strobl */ -public class MongoServerApiFactoryBeanTests { +class MongoServerApiFactoryBeanTests { - @Test // DATAMONGO-2306 - public void createsServerApiForVersionString() { + @Test // GH-3820 + void createsServerApiForVersionString() { RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); definition.getPropertyValues().addPropertyValue("version", "V1"); @@ -52,8 +52,8 @@ public void createsServerApiForVersionString() { assertThat(target.getStrict()).isNotPresent(); } - @Test // DATAMONGO-2306 - public void createsServerApiForVersionNumber() { + @Test // GH-3820 + void createsServerApiForVersionNumber() { RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); definition.getPropertyValues().addPropertyValue("version", "1"); From 8f00ffd29158ff2294a08f8e4ec3c5e8ab45e9ab Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 15 Sep 2021 15:30:10 +0200 Subject: [PATCH 099/885] Change visibility of PersistentEntitiesFactoryBean. Closes: #3825 --- .../data/mongodb/config/PersistentEntitiesFactoryBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java index ba382a32cc..29d606c4de 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java @@ -28,7 +28,7 @@ * @author Christoph Strobl * @since 3.1 */ -class PersistentEntitiesFactoryBean implements FactoryBean { +public class PersistentEntitiesFactoryBean implements FactoryBean { private final MappingMongoConverter converter; From 38e1d0d92deb9a2d0009abf335e67b6fef74f491 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:44:34 +0200 Subject: [PATCH 100/885] Prepare 3.3 M3 (2021.1.0). See #3771 --- pom.xml | 8 ++++---- src/main/resources/notice.txt | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index 7cb1d10f85..e1ecd121e4 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-SNAPSHOT + 2.6.0-M3 @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-SNAPSHOT + 2.6.0-M3 4.3.2 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-libs-milestone + https://repo.spring.io/libs-milestone sonatype-libs-snapshot diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index 29628c3570..ceef18ae5b 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,4 +1,4 @@ -Spring Data MongoDB 3.3 M2 (2021.1.0) +Spring Data MongoDB 3.3 M3 (2021.1.0) Copyright (c) [2010-2019] Pivotal Software, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). @@ -27,5 +27,6 @@ conditions of the subcomponent's license, as noted in the LICENSE file. + From 00350edd3265b3d81c1551e7ffb90fa1a72c9fd6 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:44:56 +0200 Subject: [PATCH 101/885] Release version 3.3 M3 (2021.1.0). 
See #3771 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index e1ecd121e4..b63985f7c6 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0033bd11d5..ac1428bdf5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f62c8dc7f4..f3e3c3d92e 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 2f73c10eba..6a959b228d 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 ../pom.xml From 715ae26f3ccb3842f25abdd698295e365aa1b898 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:52:18 +0200 Subject: [PATCH 102/885] Prepare next development iteration. See #3771 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index b63985f7c6..e1ecd121e4 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index ac1428bdf5..0033bd11d5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f3e3c3d92e..f62c8dc7f4 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 6a959b228d..2f73c10eba 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT ../pom.xml From b7ffff47694ba4066eadc8a7359696766b53bc8e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:52:21 +0200 Subject: [PATCH 103/885] After release cleanups. 
See #3771 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index e1ecd121e4..7cb1d10f85 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-M3 + 2.6.0-SNAPSHOT @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-M3 + 2.6.0-SNAPSHOT 4.3.2 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-milestone - https://repo.spring.io/libs-milestone + spring-libs-snapshot + https://repo.spring.io/libs-snapshot sonatype-libs-snapshot From 63d9875576beddd0651c6d7c777f54829dbb6aa1 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 6 Jul 2021 07:54:31 +0200 Subject: [PATCH 104/885] Update test for MongoDB Server 5.0. Update assertions for changed return types, add a bit of think time and disable tests for no longer supported features. See #3696 Original pull request: #3753. --- .../config/AbstractIntegrationTests.java | 15 ++++++------- .../core/geo/GeoSpatialIndexTests.java | 8 ++++--- .../core/index/IndexingIntegrationTests.java | 6 ++++- .../DefaultMessageListenerContainerTests.java | 22 ++++++++++++------- .../mongodb/test/util/MongoTestUtils.java | 17 ++++++++++++++ 5 files changed, 48 insertions(+), 20 deletions(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java index 701e2eb986..00a4e9d935 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java @@ -21,10 +21,9 @@ import java.util.Set; import org.bson.Document; -import org.junit.After; -import org.junit.Before; -import org.junit.runner.RunWith; - +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.dao.DataAccessException; @@ -32,7 +31,7 @@ import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; import com.mongodb.MongoException; import com.mongodb.client.MongoClient; @@ -41,7 +40,7 @@ /** * @author Oliver Gierke */ -@RunWith(SpringRunner.class) +@ExtendWith(SpringExtension.class) @ContextConfiguration public abstract class AbstractIntegrationTests { @@ -71,8 +70,8 @@ protected boolean autoIndexCreation() { @Autowired MongoOperations operations; - @Before - @After + @BeforeEach + @AfterEach public void cleanUp() { for (String collectionName : operations.getCollectionNames()) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java index 449c78f225..10984e0a1c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java @@ -21,8 +21,8 @@ import java.util.List; import java.util.Map; 
-import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.data.geo.Point; @@ -35,6 +35,7 @@ import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; import com.mongodb.MongoException; import com.mongodb.WriteConcern; @@ -52,7 +53,7 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests { @Autowired private MongoTemplate template; - @Before + @BeforeEach public void setUp() { template.setWriteConcern(WriteConcern.JOURNALED); @@ -82,6 +83,7 @@ public void test2dSphereIndex() { } @Test // DATAMONGO-778 + @EnableIfMongoServerVersion(isLessThan = "5.0") public void testHaystackIndex() { try { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java index 2c61b0fdbf..7c731a37c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java @@ -155,7 +155,11 @@ public void evaluatesTimeoutSpelExpresssionWithBeanReference() { }); assertThat(indexInfo).isPresent(); - assertThat(indexInfo.get()).containsEntry("expireAfterSeconds", 11L); + assertThat(indexInfo.get()).hasEntrySatisfying("expireAfterSeconds", timeout -> { + + // MongoDB 5 returns int not long + assertThat(timeout).isIn(11, 11L); + }); } @Target({ ElementType.FIELD }) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java index b973de0cf1..c47918c565 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java @@ -38,13 +38,16 @@ import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.mongodb.test.util.Client; import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; import org.springframework.data.mongodb.test.util.MongoServerCondition; import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.data.mongodb.test.util.Template; import org.springframework.util.ErrorHandler; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.model.CreateCollectionOptions; import com.mongodb.client.model.changestream.ChangeStreamDocument; @@ -60,9 +63,12 @@ public class 
DefaultMessageListenerContainerTests { static final String DATABASE_NAME = "change-stream-events"; static final String COLLECTION_NAME = "collection-1"; static final String COLLECTION_2_NAME = "collection-2"; + static final String COLLECTION_3_NAME = "collection-3"; static final Duration TIMEOUT = Duration.ofSeconds(2); + @Client static MongoClient client; + @Template(database = DATABASE_NAME, initialEntitySet = Person.class) // static MongoTemplate template; @@ -74,10 +80,13 @@ public class DefaultMessageListenerContainerTests { private CollectingMessageListener messageListener; @BeforeEach - void beforeEach() { + void beforeEach() throws InterruptedException { + + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_NAME, client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_2_NAME, client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_3_NAME, client); - template.dropCollection(COLLECTION_NAME); - template.dropCollection(COLLECTION_2_NAME); + Thread.sleep(100); messageListener = new CollectingMessageListener<>(); } @@ -281,7 +290,7 @@ public void abortsSubscriptionOnError() throws InterruptedException { @Test // DATAMONGO-1803 public void callsDefaultErrorHandlerOnError() throws InterruptedException { - dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, + dbFactory.getMongoDatabase().createCollection(COLLECTION_3_NAME, new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); collection.insertOne(new Document("_id", "id-1").append("value", "foo")); @@ -298,10 +307,7 @@ public void callsDefaultErrorHandlerOnError() throws InterruptedException { Document.class); SubscriptionUtils.awaitSubscription(subscription); - - template.dropCollection(COLLECTION_NAME); - - Thread.sleep(20); + dbFactory.getMongoDatabase().drop(); verify(errorHandler, atLeast(1)).handleError(any(DataAccessException.class)); } finally { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java index 5a41e8a68c..7e29d2a272 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java @@ -166,6 +166,23 @@ public static void dropCollectionNow(String dbName, String collectionName, .verifyComplete(); } + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does and + * verify operation result. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. + */ + public static void dropCollectionNow(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + com.mongodb.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + database.getCollection(collectionName).drop(); + } + /** * Remove all documents from the {@link MongoCollection} with given name in the according {@link MongoDatabase * database}. From 2f208d712ca2353c45529132ba88e3e9b4c339eb Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 6 Jul 2021 09:28:32 +0200 Subject: [PATCH 105/885] Update CI to cover MongoDB Server 5.0. MongoDB has alpha releases in a slightly different location on their distribution server. 
And they use different keys for signing these alpha releases compared to the overall package listing. Closes #3696 Original pull request: #3753. --- Jenkinsfile | 40 ++++++++++++++++++++++++++++++ ci/openjdk8-mongodb-5.0/Dockerfile | 17 +++++++++++++ 2 files changed, 57 insertions(+) create mode 100644 ci/openjdk8-mongodb-5.0/Dockerfile diff --git a/Jenkinsfile b/Jenkinsfile index 1eb84755a5..1ee5ed5c5f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -14,6 +14,22 @@ pipeline { stages { stage("Docker images") { parallel { + stage('Publish JDK 8 + MongoDB 5.0') { + when { + changeset "ci/openjdk8-mongodb-5.0/**" + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } + + steps { + script { + def image = docker.build("springci/spring-data-openjdk8-with-mongodb-5.0.0", "ci/openjdk8-mongodb-5.0/") + docker.withRegistry('', 'hub.docker.com-springbuildmaster') { + image.push() + } + } + } + } stage('Publish JDK 8 + MongoDB 4.0') { when { changeset "ci/openjdk8-mongodb-4.0/**" @@ -151,6 +167,30 @@ pipeline { } } + stage("test: mongodb 5.0 (jdk8)") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + } + steps { + script { + docker.withRegistry('', 'hub.docker.com-springbuildmaster') { + docker.image('springci/spring-data-openjdk8-with-mongodb-5.0.0:latest').inside('-v $HOME:/tmp/jenkins-home') { + sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log' + sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &' + sh 'sleep 10' + sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' + sh 'sleep 15' + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + } + } + } + } + } + stage("test: baseline (jdk16)") { agent { label 'data' diff --git a/ci/openjdk8-mongodb-5.0/Dockerfile b/ci/openjdk8-mongodb-5.0/Dockerfile new file mode 100644 index 0000000000..658b615bbd --- /dev/null +++ b/ci/openjdk8-mongodb-5.0/Dockerfile @@ -0,0 +1,17 @@ +FROM adoptopenjdk/openjdk8:latest + +ENV TZ=Etc/UTC +ENV DEBIAN_FRONTEND=noninteractive + +RUN set -eux; \ + apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \ + # MongoDB 5.0 release signing key + apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \ + # Needed when MongoDB creates a 5.0 folder. + echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \ + echo ${TZ} > /etc/timezone; + +RUN apt-get update; \ + apt-get install -y mongodb-org=5.0.0 mongodb-org-server=5.0.0 mongodb-org-shell=5.0.0 mongodb-org-mongos=5.0.0 mongodb-org-tools=5.0.0; \ + apt-get clean; \ + rm -rf /var/lib/apt/lists/*; From 7f585382925f4a40153221e4039268cf4be40c97 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 21 Sep 2021 15:16:12 +0200 Subject: [PATCH 106/885] Use HTTPS in Dockerfiles for package download. See #3696 Original pull request: #3753. 
--- ci/openjdk11-mongodb-4.4/Dockerfile | 3 +++ ci/openjdk16-mongodb-4.4/Dockerfile | 5 ++++- ci/openjdk8-mongodb-4.0/Dockerfile | 3 +++ ci/openjdk8-mongodb-4.4/Dockerfile | 3 +++ ci/openjdk8-mongodb-5.0/Dockerfile | 5 ++++- 5 files changed, 17 insertions(+), 2 deletions(-) diff --git a/ci/openjdk11-mongodb-4.4/Dockerfile b/ci/openjdk11-mongodb-4.4/Dockerfile index 6c94ac38ff..7de227c4d9 100644 --- a/ci/openjdk11-mongodb-4.4/Dockerfile +++ b/ci/openjdk11-mongodb-4.4/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ diff --git a/ci/openjdk16-mongodb-4.4/Dockerfile b/ci/openjdk16-mongodb-4.4/Dockerfile index 7a1e47cf00..5f49272c4a 100644 --- a/ci/openjdk16-mongodb-4.4/Dockerfile +++ b/ci/openjdk16-mongodb-4.4/Dockerfile @@ -1,9 +1,12 @@ -FROM adoptopenjdk/openjdk16:latest +FROM adoptopenjdk/openjdk16:latest ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ diff --git a/ci/openjdk8-mongodb-4.0/Dockerfile b/ci/openjdk8-mongodb-4.0/Dockerfile index e05068ab32..bb75ccfc14 100644 --- a/ci/openjdk8-mongodb-4.0/Dockerfile +++ b/ci/openjdk8-mongodb-4.0/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \ diff --git a/ci/openjdk8-mongodb-4.4/Dockerfile b/ci/openjdk8-mongodb-4.4/Dockerfile index 79774dd269..f9a814533b 100644 --- a/ci/openjdk8-mongodb-4.4/Dockerfile +++ b/ci/openjdk8-mongodb-4.4/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 
multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ diff --git a/ci/openjdk8-mongodb-5.0/Dockerfile b/ci/openjdk8-mongodb-5.0/Dockerfile index 658b615bbd..53509efd05 100644 --- a/ci/openjdk8-mongodb-5.0/Dockerfile +++ b/ci/openjdk8-mongodb-5.0/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \ # MongoDB 5.0 release signing key apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \ @@ -12,6 +15,6 @@ RUN set -eux; \ echo ${TZ} > /etc/timezone; RUN apt-get update; \ - apt-get install -y mongodb-org=5.0.0 mongodb-org-server=5.0.0 mongodb-org-shell=5.0.0 mongodb-org-mongos=5.0.0 mongodb-org-tools=5.0.0; \ + apt-get install -y mongodb-org=5.0.3 mongodb-org-server=5.0.3 mongodb-org-shell=5.0.3 mongodb-org-mongos=5.0.3 mongodb-org-tools=5.0.3; \ apt-get clean; \ rm -rf /var/lib/apt/lists/*; From 9e2f6055a3917b8f9927859f28b389765eb2bd68 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 6 Jul 2021 10:40:10 +0200 Subject: [PATCH 107/885] Refine CI job triggers. See #3696 Original pull request: #3753. --- Jenkinsfile | 38 ++++++-------------------------------- 1 file changed, 6 insertions(+), 32 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 1ee5ed5c5f..a7e2d38bb9 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -83,8 +83,9 @@ pipeline { stage("test: baseline (jdk8)") { when { + beforeAgent(true) anyOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -113,8 +114,9 @@ pipeline { stage("Test other configurations") { when { + beforeAgent(true) allOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -219,8 +221,9 @@ pipeline { stage('Release to artifactory') { when { + beforeAgent(true) anyOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -250,35 +253,6 @@ pipeline { } } } - - stage('Publish documentation') { - when { - branch 'main' - } - agent { - label 'data' - } - options { timeout(time: 20, unit: 'MINUTES') } - - environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') - } - - steps { - script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' + - '-Dartifactory.server=https://repo.spring.io ' + - "-Dartifactory.username=${ARTIFACTORY_USR} " + - "-Dartifactory.password=${ARTIFACTORY_PSW} " + - "-Dartifactory.distribution-repository=temp-private-local " + - '-Dmaven.test.skip=true clean deploy -U -B' - } - } - } - } - } } post { From 2f98a6656bb17b5f909a39d4bf5df8554864ce28 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 24 Sep 2021 10:35:38 +0200 Subject: [PATCH 108/885] Fix javadoc errors and warnings Closes: #3835 --- .../data/mongodb/BindableMongoExpression.java | 2 +- .../data/mongodb/MongoCollectionUtils.java | 4 +- .../data/mongodb/MongoDatabaseUtils.java | 10 +- .../data/mongodb/MongoExpression.java | 2 +- 
.../data/mongodb/MongoResourceHolder.java | 2 +- .../data/mongodb/MongoTransactionManager.java | 16 +- .../mongodb/ReactiveMongoDatabaseUtils.java | 10 +- .../mongodb/ReactiveMongoResourceHolder.java | 2 +- .../ReactiveMongoTransactionManager.java | 12 +- .../SessionAwareMethodInterceptor.java | 2 +- .../data/mongodb/SpringDataMongoDB.java | 2 +- .../config/MongoConfigurationSupport.java | 3 +- .../mongodb/core/ChangeStreamOptions.java | 4 +- .../data/mongodb/core/CollectionOptions.java | 2 +- .../mongodb/core/ExecutableFindOperation.java | 2 +- .../core/ExecutableUpdateOperation.java | 2 +- .../mongodb/core/FindAndReplaceOptions.java | 2 +- .../core/MongoDatabaseFactorySupport.java | 2 +- .../mongodb/core/MongoDbFactorySupport.java | 2 +- .../mongodb/core/MongoJsonSchemaCreator.java | 3 +- .../data/mongodb/core/MongoOperations.java | 129 +++++++------ .../data/mongodb/core/MongoTemplate.java | 6 +- .../core/ReactiveChangeStreamOperation.java | 2 +- .../mongodb/core/ReactiveFindOperation.java | 6 +- .../mongodb/core/ReactiveMongoOperations.java | 181 +++++++++--------- .../mongodb/core/ReactiveMongoTemplate.java | 6 +- .../mongodb/core/ReactiveSessionCallback.java | 2 +- .../mongodb/core/ReactiveSessionScoped.java | 4 +- .../data/mongodb/core/ScriptOperations.java | 6 +- .../data/mongodb/core/SessionCallback.java | 2 +- .../data/mongodb/core/SessionScoped.java | 6 +- .../mongodb/core/aggregation/Aggregation.java | 4 +- .../AggregationSpELExpression.java | 4 +- .../core/aggregation/AggregationUpdate.java | 3 +- .../core/aggregation/ArithmeticOperators.java | 34 ++-- .../core/aggregation/BucketAutoOperation.java | 6 +- .../core/aggregation/BucketOperation.java | 3 +- .../core/aggregation/ConvertOperators.java | 4 +- .../core/aggregation/CountOperation.java | 3 +- .../core/aggregation/DateOperators.java | 6 +- .../aggregation/GraphLookupOperation.java | 3 +- .../core/aggregation/GroupOperation.java | 2 +- ...DelegatingAggregationOperationContext.java | 2 +- .../core/aggregation/RedactOperation.java | 3 +- .../core/aggregation/ScriptOperators.java | 36 ++-- .../aggregation/SetWindowFieldsOperation.java | 3 +- .../aggregation/SortByCountOperation.java | 7 +- .../core/aggregation/UnionWithOperation.java | 2 +- .../core/convert/MappingMongoConverter.java | 2 +- .../mongodb/core/convert/QueryMapper.java | 2 +- .../data/mongodb/core/geo/GeoJsonModule.java | 2 +- .../mongodb/core/index/CompoundIndex.java | 2 +- .../mongodb/core/index/DurationStyle.java | 2 +- .../data/mongodb/core/index/HashIndexed.java | 2 +- .../mongodb/core/index/WildcardIndex.java | 4 +- .../data/mongodb/core/mapping/Encrypted.java | 4 +- .../data/mongodb/core/mapping/Field.java | 2 +- .../data/mongodb/core/mapping/FieldType.java | 2 +- .../data/mongodb/core/mapping/ShardKey.java | 2 +- .../data/mongodb/core/mapping/Sharded.java | 6 +- .../data/mongodb/core/mapping/TimeSeries.java | 3 +- .../data/mongodb/core/mapping/Unwrapped.java | 4 +- .../core/mapping/event/AfterDeleteEvent.java | 2 +- .../core/messaging/ChangeStreamRequest.java | 14 +- .../DefaultMessageListenerContainer.java | 2 +- .../messaging/MessageListenerContainer.java | 24 +-- .../mongodb/core/messaging/Subscription.java | 4 +- .../core/messaging/TailableCursorRequest.java | 6 +- .../data/mongodb/core/query/Collation.java | 2 +- .../data/mongodb/core/query/Criteria.java | 4 +- .../data/mongodb/core/query/NearQuery.java | 2 +- .../mongodb/core/schema/JsonSchemaObject.java | 2 +- .../mongodb/core/script/NamedMongoScript.java | 4 +- 
.../data/mongodb/repository/Aggregation.java | 26 +-- .../data/mongodb/repository/Query.java | 12 +- .../repository/query/MongoQueryCreator.java | 2 +- 76 files changed, 346 insertions(+), 360 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java index 982f683d53..ac735be37f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java @@ -31,7 +31,7 @@ * expression. The expression will be wrapped within { ... } if necessary. The actual parsing and parameter * binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via * {@link #toDocument()}. - *

+ *
* *

  * $toUpper : $name                -> { '$toUpper' : '$name' }
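
A small usage sketch to make the javadoc above concrete; the expression string is taken from the example, while the use of the static `MongoExpression.create` factory is an assumption for illustration rather than part of this patch.

[source,java]
----
import org.bson.Document;
import org.springframework.data.mongodb.MongoExpression;

class MongoExpressionSketch {

	Document toUpperExpression() {

		// The raw expression is stored as-is; wrapping it in { ... } and
		// parsing happen lazily on the first toDocument() call.
		MongoExpression expression = MongoExpression.create("$toUpper : $name");

		return expression.toDocument(); // { "$toUpper" : "$name" }
	}
}
----
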
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
index 3d85a33dcb..1b796eabd2 100644
--- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
@@ -20,8 +20,8 @@
 
 /**
  * Helper class featuring helper methods for working with MongoDb collections.
- * 

- *

+ *
+ *
* Mainly intended for internal use within the framework. * * @author Thomas Risberg diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java index c9342ec4f6..f0b6c2228a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -30,7 +30,7 @@ * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining * {@link ClientSession session bound} resources, such as {@link MongoDatabase} and * {@link com.mongodb.client.MongoCollection} suitable for transactional usage. - *

+ *
* Note: Intended for internal usage only. * * @author Christoph Strobl @@ -43,7 +43,7 @@ public class MongoDatabaseUtils { /** * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -56,7 +56,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory) { /** * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -71,7 +71,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSyn /** * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -85,7 +85,7 @@ public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFa /** * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java index 541118b114..2ea38af67f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java @@ -18,7 +18,7 @@ /** * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when * passed on to the driver. - *

+ *
* A set of predefined {@link MongoExpression expressions}, including a * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method * like expressions (eg. {@code toUpper(name)}) are available via the diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java index 90a3b32023..157489e11c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java @@ -24,7 +24,7 @@ /** * MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}. * {@link MongoTransactionManager} binds instances of this class to the thread. - *

+ *
* Note: Intended for internal usage only. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java index 1e6013d73d..d244da6296 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java @@ -37,18 +37,18 @@ /** * A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages * {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}. - *

+ *
* Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread. - *

+ *
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal * consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction() * commit} or {@link ClientSession#abortTransaction() abort} a transaction. - *

+ *
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via * {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard * {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as * {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly. - *

+ *
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override * {@link #doCommit(MongoTransactionObject)} to implement the * Retry Commit Operation @@ -69,11 +69,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager /** * Create a new {@link MongoTransactionManager} for bean-style usage. - *

+ *
* Note:The {@link MongoDatabaseFactory db factory} has to be * {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. - *

+ *
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. * @@ -212,8 +212,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio * By default those labels are ignored, nevertheless one might check for * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the * commit.
+ *

 	 * 
-	 *     
 	 * int retries = 3;
 	 * do {
 	 *     try {
@@ -226,8 +226,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio
 	 *     }
 	 *     Thread.sleep(500);
 	 * } while (--retries > 0);
-	 *     
*
+ *
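To complement the commit-retry discussion above, a minimal configuration sketch for the transaction manager itself; the configuration class and bean names are assumptions, and @Transactional methods working through MongoTemplate then run in native MongoDB transactions.

    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.mongodb.MongoDatabaseFactory;
    import org.springframework.data.mongodb.MongoTransactionManager;
    import org.springframework.transaction.annotation.EnableTransactionManagement;

    @Configuration
    @EnableTransactionManagement
    class MongoTransactionConfig {

        // Binds a ClientSession to the thread for each transaction so that
        // MongoTemplate (via MongoDatabaseUtils) participates automatically.
        @Bean
        MongoTransactionManager transactionManager(MongoDatabaseFactory databaseFactory) {
            return new MongoTransactionManager(databaseFactory);
        }
    }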
* * @param transactionObject never {@literal null}. * @throws Exception in case of transaction errors. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java index 4699ac56c2..4ae9e227f1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -36,7 +36,7 @@ * Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for * obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection} * suitable for transactional usage. - *

+ *
* Note: Intended for internal usage only. * * @author Mark Paluch @@ -75,7 +75,7 @@ public static Mono isTransactionActive(ReactiveMongoDatabaseFactory dat /** * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -88,7 +88,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto /** * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -104,7 +104,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto /** * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory * factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -119,7 +119,7 @@ public static Mono getDatabase(String dbName, ReactiveMongoDataba /** * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory * factory}. - *

+ *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java index b1f1c06d08..b3338fd7ba 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java @@ -24,7 +24,7 @@ /** * MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds * instances of this class to the subscriber context. - *

+ *
* Note: Intended for internal usage only. * * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java index 63706eff8a..711af76f53 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java @@ -38,21 +38,21 @@ * A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages * {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}. - *

+ *
* Binds a {@link ClientSession} from the specified * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber * {@link reactor.util.context.Context}. - *

+ *
* {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a * {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start}, * {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or * {@link ClientSession#abortTransaction() abort} a transaction. - *

+ *
* Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead * of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring * classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly. - *

+ *
* By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override * {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the * Retry Commit Operation @@ -71,11 +71,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction /** * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage. - *

+ *
* Note:The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor * to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. - *

+ *
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java index da48f22154..b9b2c88130 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java @@ -35,7 +35,7 @@ /** * {@link MethodInterceptor} implementation looking up and invoking an alternative target method having * {@link ClientSession} as its first argument. This allows seamless integration with the existing code base. - *
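For the reactive transaction manager described above, an equivalent sketch; the configuration class name is made up, and here the ClientSession is bound to the subscriber Context rather than the current thread.

    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
    import org.springframework.data.mongodb.ReactiveMongoTransactionManager;
    import org.springframework.transaction.annotation.EnableTransactionManagement;

    @Configuration
    @EnableTransactionManagement
    class ReactiveMongoTransactionConfig {

        // ReactiveMongoTemplate picks the bound session up through ReactiveMongoDatabaseUtils.
        @Bean
        ReactiveMongoTransactionManager reactiveTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) {
            return new ReactiveMongoTransactionManager(databaseFactory);
        }
    }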

+ *
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself * like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them * if not already proxied. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java index dbbf146fc1..808b576bcb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java @@ -48,7 +48,7 @@ public static MongoDriverInformation driverInformation() { /** * Fetches the "Implementation-Version" manifest attribute from the jar file. - *

+ *
* Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the * version in all environments. In this case the current Major version is returned as a fallback. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java index 52ec72d171..5fe0c4fe4e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -172,8 +172,7 @@ protected Set> scanForEntities(String basePackage) throws ClassNotFound /** * Configures whether to abbreviate field names for domain objects by configuring a - * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced - * customization needs, consider overriding {@link #mappingMongoConverter()}. + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. * * @return */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java index a4f6f7e226..3fe6767533 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java @@ -242,13 +242,13 @@ public ChangeStreamOptionsBuilder collation(Collation collation) { /** * Set the filter to apply. - *

+ *
* Fields on aggregation expression root level are prefixed to map to fields contained in * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns}, * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken * as given, during the mapping procedure. You may want to have a look at the * structure of Change Events. - *

+ *
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are * mapped to domain type fields. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index edff52bb74..f866896694 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -428,7 +428,7 @@ public Optional getValidationLevel() { /** * Get the {@code validationAction} to perform. * - * @return @return {@link Optional#empty()} if not set. + * @return {@link Optional#empty()} if not set. */ public Optional getValidationAction() { return Optional.ofNullable(validationAction); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java index d67212bdc6..f41af5c6c5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java @@ -125,7 +125,7 @@ default Optional first() { /** * Get the number of matching elements. - *
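A small sketch of the ChangeStreamOptions filter just described, using a TypedAggregation so field names are mapped against the domain type; the Person type and the age criterion are examples only.

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
    import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
    import static org.springframework.data.mongodb.core.query.Criteria.where;

    import org.springframework.data.mongodb.core.ChangeStreamOptions;

    class ChangeStreamFilterExample {

        // Builds options whose filter is mapped against the Person domain type; root-level
        // fields end up prefixed with "fullDocument." as described in the Javadoc.
        ChangeStreamOptions adultsOnly() {
            return ChangeStreamOptions.builder()
                    .filter(newAggregation(Person.class, match(where("age").gte(18))))
                    .build();
        }

        static class Person {
            String name;
            int age;
        }
    }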

+ *
* This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation * execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard, * session and transaction compliance. In case an inaccurate count satisfies the applications needs use diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java index a8b58669e3..32b7017e41 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java @@ -89,7 +89,7 @@ default Optional findAndModify() { /** * Trigger - * findOneAndReplace + * findOneAndReplace * execution by calling one of the terminating methods. * * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java index 6122837a27..42a8a3ef77 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java @@ -17,7 +17,7 @@ /** * Options for - * findOneAndReplace. + * findOneAndReplace. *
* Defaults to *

diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java index dac4b0d6d7..9c8419a154 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java @@ -33,7 +33,7 @@ /** * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as * database name and exception translator. - *

+ *
* Not intended to be used directly. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java index bc0e39bbc9..ba530d502f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java @@ -20,7 +20,7 @@ /** * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as * database name and exception translator. - *

+ *
* Not intended to be used directly. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java index 5e5bc50644..f5b620d0fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -40,6 +40,7 @@ * following mapping rules. *

* Required Properties + *

*
    *
  • Properties of primitive type
  • *
@@ -61,7 +62,7 @@ * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. - *

+ * {@link Encrypted} properties will contain {@literal encrypt} information. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java index e4a4b0868f..c015fb5a49 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java @@ -58,7 +58,7 @@ * Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but * a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK * proxy). - *
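As a sketch of the MongoJsonSchemaCreator mapping rules described above, assuming a MappingMongoConverter is available (for example from a MongoTemplate); the domain type is illustrative.

    import org.springframework.data.mongodb.core.MongoJsonSchemaCreator;
    import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
    import org.springframework.data.mongodb.core.schema.MongoJsonSchema;

    class SchemaDerivationExample {

        // Derives a $jsonSchema representation from the mapping metadata of the domain type.
        MongoJsonSchema schemaFor(MappingMongoConverter converter) {
            return MongoJsonSchemaCreator.create(converter).createSchemaFor(Person.class);
        }

        static class Person {
            String name;
        }
    }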

+ *
* NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB * specific documentation to learn more about Multi * Document Transactions. @@ -125,7 +125,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Executes a {@link DbCallback} translating any exceptions as necessary. - *

+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -138,7 +138,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Executes the given {@link CollectionCallback} on the entity collection of the specified class. - *

+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -151,7 +151,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Executes the given {@link CollectionCallback} on the collection of the given name. - *

+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -176,7 +176,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *
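A brief sketch of the callback-style execute(...) methods documented above; the "persons" collection name is an assumption.

    import org.springframework.data.mongodb.core.MongoOperations;

    class ExecuteCallbackExample {

        // The callback works on the driver's MongoCollection while Spring translates any
        // MongoException into the DataAccessException hierarchy.
        long countAllDocuments(MongoOperations operations) {
            return operations.execute("persons", collection -> collection.countDocuments());
        }
    }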

+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use the * {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}. * @@ -212,7 +212,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}. - *

+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * * @param session must not be {@literal null}. @@ -300,7 +300,7 @@ public T execute(SessionCallback action, Consumer onComple * is created on first interaction with the server. Collections can be explicitly created via * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) * exists} first. - *
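The session-scoped usage described above might look like the following sketch, where the caller supplies and finally closes the ClientSession; the MongoClient and domain type are assumed to exist.

    import com.mongodb.ClientSessionOptions;
    import com.mongodb.client.ClientSession;
    import com.mongodb.client.MongoClient;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.query.Query;

    class SessionScopedExample {

        // The caller owns the ClientSession: it is supplied up front and closed in the
        // doFinally consumer once the callback has finished.
        long countInSession(MongoOperations operations, MongoClient client) {
            return operations
                    .withSession(() -> client.startSession(ClientSessionOptions.builder().build()))
                    .execute(sessionBound -> sessionBound.count(new Query(), Person.class), ClientSession::close);
        }

        static class Person {
        }
    }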

+ *
* Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -310,7 +310,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Check to see if a collection with a name indicated by the entity class exists. - *

+ *
* Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -320,7 +320,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Check to see if a collection with a given name exists. - *

+ *
* Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -330,7 +330,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Drop the collection with the name indicated by the entity class. - *

+ *
* Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -339,7 +339,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Drop the collection with the given name. - *

+ *
* Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -403,10 +403,10 @@ public T execute(SessionCallback action, Consumer onComple /** * Query for a list of objects of type T from the collection used by the entity class. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -417,10 +417,10 @@ public T execute(SessionCallback action, Consumer onComple /** * Query for a list of objects of type T from the specified collection. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -539,11 +539,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *
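A minimal sketch of the findAll(...) variants documented above; Person is an assumed, homogeneously mapped domain type.

    import java.util.List;

    import org.springframework.data.mongodb.core.MongoOperations;

    class FindAllExample {

        // Loads every document of the mapped collection and converts each one
        // with the configured MappingMongoConverter.
        List<Person> loadAll(MongoOperations operations) {
            return operations.findAll(Person.class);
        }

        static class Person {
            String name;
        }
    }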

+ *
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that * needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name * of the inputCollection is derived from the inputType of the aggregation. - *

+ *
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -557,10 +557,10 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

+ *
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that * needs to be closed. The raw results will be mapped to the given entity class. - *

+ *
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -576,10 +576,10 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

+ *
* Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that * needs to be closed. The raw results will be mapped to the given entity class. - *

+ *
* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -702,10 +702,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the * specified type. - *
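A sketch of the streaming aggregation described above; the CloseableIterator wraps a live cursor and is closed via try-with-resources, and the pipeline and types are illustrative.

    import static org.springframework.data.mongodb.core.aggregation.Aggregation.group;
    import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;

    import org.bson.Document;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.util.CloseableIterator;

    class AggregateStreamExample {

        // Streams the raw aggregation results; explain mode is rejected for streaming
        // execution as noted above.
        void printGroupCounts(MongoOperations operations) {
            try (CloseableIterator<Document> results = operations.aggregateStream(
                    newAggregation(Person.class, group("name").count().as("total")), Document.class)) {
                results.forEachRemaining(document -> System.out.println(document.toJson()));
            }
        }

        static class Person {
            String name;
        }
    }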

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -720,10 +720,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -768,10 +768,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. - *
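The ad-hoc findOne(...) query described above, sketched with the Query/Criteria builders; the field name and domain type are assumptions.

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import org.springframework.data.mongodb.core.MongoOperations;

    class FindOneExample {

        // Returns the first match mapped onto the domain type, or null if nothing matches.
        Person findByName(MongoOperations operations, String name) {
            return operations.findOne(query(where("name").is(name)), Person.class);
        }

        static class Person {
            String name;
        }
    }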

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -784,10 +784,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the specified collection to a List of the specified type. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -881,7 +881,7 @@ default List findDistinct(Query query, String field, String collection, C } /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -897,7 +897,7 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -914,7 +914,7 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. * @@ -934,7 +934,7 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. * @@ -957,7 +957,7 @@ T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
* The collection name is derived from the {@literal replacement} type.
@@ -977,7 +977,7 @@ default T findAndReplace(Query query, T replacement) { /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
* Options are defaulted to {@link FindAndReplaceOptions#empty()}.
@@ -997,7 +997,7 @@ default T findAndReplace(Query query, T replacement, String collectionName) /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -1018,7 +1018,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -1041,7 +1041,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -1066,7 +1066,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -1094,7 +1094,7 @@ default T findAndReplace(Query query, S replacement, FindAndReplaceOption /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -1120,9 +1120,9 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the * database. - *
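A sketch of the findAndModify(...) flow described above, returning the post-update state; field names and the domain type are illustrative.

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import org.springframework.data.mongodb.core.FindAndModifyOptions;
    import org.springframework.data.mongodb.core.MongoOperations;
    import org.springframework.data.mongodb.core.query.Update;

    class FindAndModifyExample {

        // Atomically increments the counter and, with returnNew(true), hands back the
        // document state after the update was applied.
        Person incrementVisits(MongoOperations operations, String name) {
            return operations.findAndModify(query(where("name").is(name)), new Update().inc("visits", 1),
                    FindAndModifyOptions.options().returnNew(true), Person.class);
        }

        static class Person {
            String name;
            int visits;
        }
    }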

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1137,10 +1137,10 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1160,7 +1160,7 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

+ *
* This method uses an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1182,7 +1182,7 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

+ *
* This method uses an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1199,7 +1199,7 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option /** * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, * based on collection statistics. - *
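A short sketch contrasting the exact count(...) described above with the statistics-based estimatedCount(...) introduced next; criteria and types are examples.

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import org.springframework.data.mongodb.core.MongoOperations;

    class CountExample {

        // Exact count backed by countDocuments; honors criteria, skip and limit.
        long adults(MongoOperations operations) {
            return operations.count(query(where("age").gte(21)), Person.class);
        }

        // Cheap total based on collection statistics, subject to the documented
        // limitations on sharded clusters and inside transactions.
        long roughTotal(MongoOperations operations) {
            return operations.estimatedCount(Person.class);
        }

        static class Person {
            int age;
        }
    }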

+ *
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1215,7 +1215,7 @@ default long estimatedCount(Class entityClass) { /** * Estimate the number of documents in the given collection based on collection statistics. - *

+ *
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1232,7 +1232,7 @@ default long estimatedCount(Class entityClass) { * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

+ *
* This method uses an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1249,17 +1249,17 @@ default long estimatedCount(Class entityClass) { /** * Insert the object into the collection for the entity type of the object to save. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

+ *
* Insert is used to initially store the object into the database. To update an existing object use the save method. - *

+ *
* The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1270,12 +1270,12 @@ default long estimatedCount(Class entityClass) { /** * Insert the object into the specified collection. - *
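A sketch of the insert-then-save lifecycle described above; the Person type with a String id (populated from the generated ObjectId) is an assumption.

    import org.springframework.data.mongodb.core.MongoOperations;

    class InsertAndSaveExample {

        // insert(...) always creates a new document and populates the generated id;
        // save(...) performs an upsert and is used for subsequent updates.
        Person create(MongoOperations operations) {
            Person person = operations.insert(new Person("Ada"));
            person.visits = 1;
            return operations.save(person);
        }

        static class Person {
            String id;
            String name;
            int visits;

            Person(String name) {
                this.name = name;
            }
        }
    }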

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* Insert is used to initially store the object into the database. To update an existing object use the save method. - *

+ *
* The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1315,16 +1315,16 @@ default long estimatedCount(Class entityClass) { /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the * object is not already present, that is an 'upsert'. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

+ *
* The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1336,16 +1336,15 @@ default long estimatedCount(Class entityClass) { /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that * is an 'upsert'. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. - *

+ * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. + *
* The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index fb0780c5c8..b3fb915687 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -338,7 +338,7 @@ public void setApplicationContext(ApplicationContext applicationContext) throws /** * Set the {@link EntityCallbacks} instance to use when invoking * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}. - *

+ *
* Overrides potentially existing {@link EntityCallbacks}. * * @param entityCallbacks must not be {@literal null}. @@ -2664,7 +2664,7 @@ Document getMappedValidator(Validator validator, Class domainType) { /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. * The first document that matches the query is returned and also removed from the collection in the database. - *

+ *
* The query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from @@ -3493,7 +3493,7 @@ public MongoDatabaseFactory getMongoDatabaseFactory() { /** * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the * server through the driver API. - *

+ *
* The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java index 279f4184fb..d834af4b32 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java @@ -71,7 +71,7 @@ interface TerminatingChangeStream { /** * Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription} * is {@link org.reactivestreams.Subscription#cancel() canceled}. - *

+ *
* However, the stream may become dead, or invalid, if all watched collections, databases are dropped. */ Flux> listen(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java index 9a65090922..b06623197d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java @@ -91,10 +91,10 @@ interface TerminatingFind { * Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will * not be completed unless the {@link org.reactivestreams.Subscription} is * {@link org.reactivestreams.Subscription#cancel() canceled}. - *
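The listen() terminal described above could be reached through the fluent change stream API roughly as follows; the collection name and domain type are assumptions.

    import org.springframework.data.mongodb.core.ChangeStreamEvent;
    import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

    import reactor.core.publisher.Flux;

    class ChangeStreamListenExample {

        // The Flux stays open until the subscription is cancelled (or the watched
        // collection is dropped).
        Flux<ChangeStreamEvent<Person>> watch(ReactiveMongoTemplate template) {
            return template.changeStream(Person.class)
                    .watchCollection("persons")
                    .listen();
        }

        static class Person {
            String name;
        }
    }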

+ *
* However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the * document at the "end" of the collection and then the application deletes that document. - *

+ *
* A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the * streams will linger and exhaust resources.
* NOTE: Requires a capped collection. @@ -106,7 +106,7 @@ interface TerminatingFind { /** * Get the number of matching elements. - *
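A sketch of the tailable-cursor query described above; it presumes a capped collection, and the field and domain names are illustrative.

    import static org.springframework.data.mongodb.core.query.Criteria.where;
    import static org.springframework.data.mongodb.core.query.Query.query;

    import org.springframework.data.mongodb.core.ReactiveMongoTemplate;

    import reactor.core.publisher.Flux;

    class TailableCursorExample {

        // Emits existing and newly inserted matches until the subscription is cancelled.
        Flux<LogEntry> follow(ReactiveMongoTemplate template) {
            return template.query(LogEntry.class)
                    .matching(query(where("level").is("ERROR")))
                    .tail();
        }

        static class LogEntry {
            String level;
            String message;
        }
    }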

+ *
* This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java index fb1c260305..0f54bef685 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java @@ -59,7 +59,7 @@ * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}. - *

+ *
* NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB * specific documentation to learn more about Multi * Document Transactions. @@ -121,7 +121,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. - *

+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -133,7 +133,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. - *

+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -145,7 +145,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. - *

+ *
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -159,7 +159,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *

+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -178,7 +178,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} * with given {@literal sessionOptions} to each and every command issued against MongoDB. - *

+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -192,7 +192,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the * {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB. - *

+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -205,7 +205,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}. - *

+ *
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * * @param session must not be {@literal null}. @@ -218,7 +218,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide * Initiate a new {@link ClientSession} and obtain a {@link ClientSession session} bound instance of * {@link ReactiveSessionScoped}. Starts the transaction and adds the {@link ClientSession} to each and every command * issued against MongoDB. - *

+ *
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are * {@link ClientSession#abortTransaction() rolled back} upon errors. @@ -233,7 +233,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped}, start the transaction and * bind the {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against * MongoDB. - *

+ *
* Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are * {@link ClientSession#abortTransaction() rolled back} upon errors. @@ -293,7 +293,7 @@ Mono> createCollection(Class entityClass, * created on first interaction with the server. Collections can be explicitly created via * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) * exists} first. - *
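A sketch of the managed, session-bound execution described above using inTransaction(); the domain type and inserted values are examples.

    import org.springframework.data.mongodb.core.ReactiveMongoOperations;

    import reactor.core.publisher.Flux;

    class InTransactionExample {

        // Each execute(..) call runs in a new managed transaction that is committed on
        // success and aborted if the callback signals an error.
        Flux<Person> insertBoth(ReactiveMongoOperations operations) {
            return operations.inTransaction()
                    .execute(action -> action.insert(new Person("Ada"))
                            .concatWith(action.insert(new Person("Grace"))));
        }

        static class Person {
            String name;

            Person(String name) {
                this.name = name;
            }
        }
    }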

+ *
* Translate any exceptions as necessary. * * @param collectionName name of the collection. @@ -303,7 +303,7 @@ Mono> createCollection(Class entityClass, /** * Check to see if a collection with a name indicated by the entity class exists. - *

+ *
* Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -313,7 +313,7 @@ Mono> createCollection(Class entityClass, /** * Check to see if a collection with a given name exists. - *

+ *
* Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -323,7 +323,7 @@ Mono> createCollection(Class entityClass, /** * Drop the collection with the name indicated by the entity class. - *

+ *
* Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -332,7 +332,7 @@ Mono> createCollection(Class entityClass, /** * Drop the collection with the given name. - *

+ *
* Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -341,10 +341,10 @@ Mono> createCollection(Class entityClass, /** * Query for a {@link Flux} of objects of type T from the collection used by the entity class. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -355,10 +355,10 @@ Mono> createCollection(Class entityClass, /** * Query for a {@link Flux} of objects of type T from the specified collection. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -371,10 +371,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the * specified type. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -388,10 +388,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -435,10 +435,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -451,10 +451,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -566,10 +566,10 @@ default Flux findDistinct(Query query, String field, String collection, C /** * Execute an aggregation operation. - *

+ *
* The raw results will be mapped to the given entity class and are returned as stream. The name of the * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}. - *

+ *
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -584,10 +584,10 @@ default Flux findDistinct(Query query, String field, String collection, C /** * Execute an aggregation operation. - *

+ *
* The raw results will be mapped to the given {@code ouputType}. The name of the inputCollection is derived from the * {@code inputType}. - *

+ *
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -604,9 +604,9 @@ default Flux findDistinct(Query query, String field, String collection, C /** * Execute an aggregation operation. - *

+ *
* The raw results will be mapped to the given entity class. - *

+ *
* Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -676,7 +676,7 @@ default Flux findDistinct(Query query, String field, String collection, C Flux> geoNear(NearQuery near, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -691,7 +691,7 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -707,7 +707,7 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. * @@ -725,7 +725,7 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. * @@ -746,7 +746,7 @@ Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
* Options are defaulted to {@link FindAndReplaceOptions#empty()}.
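As a hedged usage sketch for the findAndModify variants documented in the hunks above (Person, firstname and lastname are assumed example names, not part of this change), applying an Update atomically to the first match and emitting the affected document:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import org.springframework.data.mongodb.core.query.Update;
import reactor.core.publisher.Mono;

class FindAndModifySketch {

	static class Person { String id; String firstname; String lastname; }

	// Atomically sets lastname on the first Person whose firstname matches; without explicit
	// FindAndModifyOptions the emitted Person reflects the state before the update.
	Mono<Person> renameWalter(ReactiveMongoOperations operations) {

		return operations.findAndModify(
				query(where("firstname").is("Walter")),
				new Update().set("lastname", "White"),
				Person.class);
	}
}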
@@ -764,7 +764,7 @@ default Mono findAndReplace(Query query, T replacement) { /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
* Options are defaulted to {@link FindAndReplaceOptions#empty()}.
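For the findOneAndReplace-backed variants, a similar illustrative sketch (again assuming a Person example type). Passing explicit FindAndReplaceOptions changes the defaulted FindAndReplaceOptions.empty() behaviour to an upsert that emits the replacement state:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.FindAndReplaceOptions;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class FindAndReplaceSketch {

	static class Person { String id; String firstname; }

	// Replaces the first matching document; upsert() inserts when nothing matches and
	// returnNew() emits the document as it is after the replacement. As noted above,
	// the replacement object must not carry an id.
	Mono<Person> replaceWalter(ReactiveMongoOperations operations, Person replacement) {

		return operations.findAndReplace(
				query(where("firstname").is("Walter")),
				replacement,
				FindAndReplaceOptions.options().upsert().returnNew());
	}
}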
@@ -783,7 +783,7 @@ default Mono findAndReplace(Query query, T replacement, String collection /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -803,7 +803,7 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -825,7 +825,7 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -849,7 +849,7 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -876,7 +876,7 @@ default Mono findAndReplace(Query query, S replacement, FindAndReplace /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. @@ -902,9 +902,9 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the * database. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -918,10 +918,10 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -940,7 +940,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

+ *
* This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -962,7 +962,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

+ *
* This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -983,7 +983,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

+ *
* This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1001,7 +1001,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, * based on collection statistics. - *

+ *
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1017,7 +1017,7 @@ default Mono estimatedCount(Class entityClass) { /** * Estimate the number of documents in the given collection based on collection statistics. - *

+ *
* Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1029,17 +1029,17 @@ default Mono estimatedCount(Class entityClass) { /** * Insert the object into the collection for the entity type of the object to save. - *
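A brief sketch contrasting the two counting styles described above (Person is an assumed example type): count(...) issues an exact, filter-aware countDocuments-based execution, while estimatedCount(...) reads collection statistics and is subject to the documented limitations:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class CountSketch {

	static class Person { String id; String lastname; }

	// Exact count: honors the query filter and runs the aggregation-based countDocuments execution.
	Mono<Long> exactNumberOfWhites(ReactiveMongoOperations operations) {
		return operations.count(query(where("lastname").is("White")), Person.class);
	}

	// Estimated count: collection statistics only, no filter; mind the transaction and
	// sharded-cluster caveats referenced above.
	Mono<Long> roughTotal(ReactiveMongoOperations operations) {
		return operations.estimatedCount(Person.class);
	}
}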

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

+ *
* Insert is used to initially store the object into the database. To update an existing object use the save method. - *

+ *
* The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1050,12 +1050,12 @@ default Mono estimatedCount(Class entityClass) { /** * Insert the object into the specified collection. - *
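To illustrate the insert/save distinction spelled out above (Person is an assumed example type, not part of this change): insert is strictly for first-time persistence of a single, non-collection-like object, whereas save performs an upsert:

import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Mono;

class InsertVersusSaveSketch {

	static class Person { String id; String firstname; }

	// insert(...) stores a new document; it fails on a duplicate id instead of overwriting.
	Mono<Person> create(ReactiveMongoOperations operations, Person person) {
		return operations.insert(person);
	}

	// save(...) is an upsert: it inserts when the id is absent and replaces the document otherwise.
	Mono<Person> createOrUpdate(ReactiveMongoOperations operations, Person person) {
		return operations.save(person);
	}
}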

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* Insert is used to initially store the object into the database. To update an existing object use the save method. - *

+ *
* The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1094,15 +1094,15 @@ default Mono estimatedCount(Class entityClass) { /** * Insert the object into the collection for the entity type of the object to save. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

+ *
* Insert is used to initially store the object into the database. To update an existing object use the save method. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1140,16 +1140,16 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the * object is not already present, that is an 'upsert'. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

+ *
* The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1161,15 +1161,14 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that * is an 'upsert'. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -1181,15 +1180,14 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the * object is not already present, that is an 'upsert'. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's - * Type Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. @@ -1199,17 +1197,16 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that * is an 'upsert'. - *

+ *
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. * - * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param objectToSave the object to store in the collReactiveMongoOperationsection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. */ @@ -1481,10 +1478,10 @@ default Mono estimatedCount(Class entityClass) { * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1500,10 +1497,10 @@ default Mono estimatedCount(Class entityClass) { * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ *
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1520,10 +1517,10 @@ default Mono estimatedCount(Class entityClass) { * the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to * filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

+ *
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

+ *
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1544,10 +1541,10 @@ default Flux> changeStream(ChangeStreamOptions options, * the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter * events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

+ *
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

+ *
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1569,10 +1566,10 @@ default Flux> changeStream(@Nullable String collectionN * Subscribe to a MongoDB Change Stream via the reactive * infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}. - *

+ *
* The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

+ *
* Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 82a3d12260..a7d0113e8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -362,7 +362,7 @@ public void setApplicationContext(ApplicationContext applicationContext) throws * Set the {@link ReactiveEntityCallbacks} instance to use when invoking * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the * {@link ReactiveBeforeSaveCallback}. - *
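A hedged sketch of the change stream API documented above (Person, lastname and the fullDocument path are assumed example names): the optional Aggregation narrows events on the server, and the resulting Flux stays open until the subscription is cancelled:

import static org.springframework.data.mongodb.core.aggregation.Aggregation.match;
import static org.springframework.data.mongodb.core.aggregation.Aggregation.newAggregation;
import static org.springframework.data.mongodb.core.query.Criteria.where;

import org.springframework.data.mongodb.core.ChangeStreamEvent;
import org.springframework.data.mongodb.core.ChangeStreamOptions;
import org.springframework.data.mongodb.core.ReactiveMongoOperations;
import reactor.core.publisher.Flux;

class ChangeStreamSketch {

	static class Person { String id; String lastname; }

	// Subscribes to change events for Person documents; the filter is applied server-side and
	// ChangeStreamOptions could additionally carry a resume token to resume the stream.
	Flux<ChangeStreamEvent<Person>> watchWhites(ReactiveMongoOperations operations) {

		ChangeStreamOptions options = ChangeStreamOptions.builder()
				.filter(newAggregation(match(where("fullDocument.lastname").is("White"))))
				.build();

		return operations.changeStream(options, Person.class);
	}
}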

+ *
* Overrides potentially existing {@link ReactiveEntityCallbacks}. * * @param entityCallbacks must not be {@literal null}. @@ -2537,7 +2537,7 @@ private Document getMappedValidator(Validator validator, Class domainType) { /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. * The first document that matches the query is returned and also removed from the collection in the database. - *

+ *
* The query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from @@ -3390,7 +3390,7 @@ private static List toDocuments(Collection + *
* The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java index c9b15324fc..8ac447eeb5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java @@ -32,7 +32,7 @@ public interface ReactiveSessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is * inferred directly into the operation so that no further interaction is necessary. - *

+ *
* Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and * others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway * objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java index 17c17edd24..2519a8bb1a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java @@ -33,7 +33,7 @@ public interface ReactiveSessionScoped { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

+ *
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -47,7 +47,7 @@ default Flux execute(ReactiveSessionCallback action) { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

+ *
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java index 24ad1c5ffc..36f8113021 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java @@ -23,7 +23,7 @@ /** - * Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions. + * Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions. * * @author Christoph Strobl * @author Oliver Gierke @@ -72,10 +72,10 @@ public interface ScriptOperations { Object call(String scriptName, Object... args); /** - * Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name. + * Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name. * * @param scriptName must not be {@literal null} or empty. - * @return false if no {@link ServerSideJavaScript} with given name exists. + * @return false if no {@literal ServerSideJavaScript} with given name exists. */ boolean exists(String scriptName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java index c12d4b1005..93d0c71378 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java @@ -31,7 +31,7 @@ public interface SessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link MongoOperations}. The session is inferred * directly into the operation so that no further interaction is necessary. - *

+ *
* Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others * are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like * {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via eg. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java index ead52ee15e..5bba65144a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java @@ -23,7 +23,7 @@ /** * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}. - *

+ *
* The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance. * * @author Christoph Strobl @@ -34,7 +34,7 @@ public interface SessionScoped { /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

+ *
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -49,7 +49,7 @@ default T execute(SessionCallback action) { /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

+ *
* It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 614489692c..e4894fbef0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -227,7 +227,7 @@ public static String previousOperation() { /** * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}. - *
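For the imperative SessionScoped/SessionCallback pair touched above, a minimal sketch (Person and lastname are assumed example names): every operation issued through the callback argument runs on the same bound ClientSession, and, as the Javadoc notes, closing that session remains the caller's responsibility:

import static org.springframework.data.mongodb.core.query.Criteria.where;
import static org.springframework.data.mongodb.core.query.Query.query;

import com.mongodb.ClientSessionOptions;
import java.util.List;
import org.springframework.data.mongodb.core.MongoOperations;

class SessionScopedSketch {

	static class Person { String id; String lastname; }

	// Binds a causally consistent ClientSession to all operations executed inside the callback.
	List<Person> findWithinSession(MongoOperations template) {

		return template
				.withSession(ClientSessionOptions.builder().causallyConsistent(true).build())
				.execute(operations -> operations.find(query(where("lastname").is("White")), Person.class));
	}
}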

+ *
* Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is * an alias for {@code $addFields}. * @@ -726,7 +726,7 @@ public AggregationPipeline getPipeline() { /** * Converts this {@link Aggregation} specification to a {@link Document}. - *

+ *
* MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render * an aggregation pipeline. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java index 14fa8c48d1..e406f57874 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java @@ -24,15 +24,15 @@ * expression.
*
* Samples:
- * *

+ * 
  * // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
  * expressionOf("qty > 100 && qty < 250);
  *
  * // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
  * expressionOf("cond(a >= 42, 'answer', 'no-answer')");
- * 
* + *
* * @author Christoph Strobl * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java index e69531e036..3cbb5f8735 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java @@ -71,8 +71,7 @@ * * @author Christoph Strobl * @author Mark Paluch - * @see MongoDB + * @see MongoDB * Reference Documentation * @since 3.0 */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 9c9132e679..0fbfcac411 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -825,7 +825,7 @@ public ATan atan() { * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by * the given numeric value in the argument. * - * @param the numeric value + * @param value the numeric value * @return new instance of {@link ATan2}. * @since 3.3 */ @@ -839,7 +839,7 @@ public ATan2 atan2(Number value) { * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by * the given field reference in the argument. * - * @param the numeric value + * @param fieldReference the numeric value * @return new instance of {@link ATan2}. * @since 3.3 */ @@ -853,7 +853,7 @@ public ATan2 atan2(String fieldReference) { * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by * the given {@link AggregationExpression} in the argument. * - * @param the numeric value + * @param expression the expression evaluating to a numeric value * @return new instance of {@link ATan2}. * @since 3.3 */ @@ -2169,7 +2169,7 @@ private Sin(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in * {@link AngularUnit#RADIANS radians}. - *

+ *
* Use {@code sinhOf("angle", DEGREES)} as shortcut for * *

@@ -2282,7 +2282,7 @@ public static Sinh sinhOf(String fieldReference) {
 		/**
 		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in
 		 * the given {@link AngularUnit unit}.
-		 * 

+ *
* Use {@code sinhOf("angle", DEGREES)} as shortcut for * *

@@ -2302,7 +2302,7 @@ public static Sinh sinhOf(String fieldReference, AngularUnit unit) {
 		/**
 		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in
 		 * {@link AngularUnit#RADIANS}.
-		 * 

+ *
* Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * @@ -2386,7 +2386,7 @@ public static ASin asinOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. - *

+ *
* * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ASin}. @@ -2436,7 +2436,7 @@ public static ASinh asinhOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. - *

+ *
* * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ASinh}. @@ -2478,7 +2478,7 @@ private Cos(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in * {@link AngularUnit#RADIANS radians}. - *

+ *
* Use {@code cosOf("angle", DEGREES)} as shortcut for * *

@@ -2589,7 +2589,7 @@ public static Cosh coshOf(String fieldReference) {
 		/**
 		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in
 		 * the given {@link AngularUnit unit}.
-		 * 

+ *
* Use {@code coshOf("angle", DEGREES)} as shortcut for * *

@@ -2607,7 +2607,7 @@ public static Cosh coshOf(String fieldReference, AngularUnit unit) {
 		/**
 		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in
 		 * {@link AngularUnit#RADIANS}.
-		 * 

+ *
* Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * @@ -2680,7 +2680,7 @@ private Tan(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in * {@link AngularUnit#RADIANS radians}. - *

+ *
* Use {@code tanOf("angle", DEGREES)} as shortcut for * *

@@ -2859,7 +2859,7 @@ public static ATan2 valueOf(AggregationExpression expression) {
 		 * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are
 		 * the first and second values passed to the expression respectively.
 		 *
-		 * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
+		 * @param fieldReference anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
 		 *          numeric value.
 		 * @return new instance of {@link ATan2}.
 		 */
@@ -2873,7 +2873,7 @@ public ATan2 atan2of(String fieldReference) {
 		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in
 		 * {@link AngularUnit#RADIANS}.
 		 *
-		 * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
+		 * @param expression anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
 		 *          numeric value.
 		 * @return new instance of {@link ATan2}.
 		 */
@@ -2927,7 +2927,7 @@ public static Tanh tanhOf(String fieldReference) {
 		/**
 		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in
 		 * the given {@link AngularUnit unit}.
-		 * 

+ *
* Use {@code tanhOf("angle", DEGREES)} as shortcut for * *

@@ -2945,7 +2945,7 @@ public static Tanh tanhOf(String fieldReference, AngularUnit unit) {
 		/**
 		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in
 		 * {@link AngularUnit#RADIANS}.
-		 * 

+ *
* Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * @@ -3029,7 +3029,7 @@ public static ATanh atanhOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. - *

+ *
* * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATanh}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java index 235c16befe..f5755346bb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java @@ -28,8 +28,7 @@ * We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating * instances of this class directly. * - * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ * @see BucketOperationSupport * @author Mark Paluch * @author Christoph Strobl @@ -248,8 +247,7 @@ public interface Granularity { /** * Supported MongoDB granularities. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity * @author Mark Paluch */ public enum Granularities implements Granularity { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java index 173fa4ece5..937ec029cb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java @@ -31,8 +31,7 @@ * We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of * this class directly. * - * @see https://docs.mongodb.org/manual/reference/aggregation/bucket/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucket/ * @see BucketOperationSupport * @author Mark Paluch * @since 1.10 diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java index 637ebd8d8f..3555ada8a7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -328,9 +328,9 @@ public Convert to(String stringTypeIdentifier) { *

1
*
double
*
2
- *
string + *
string
*
7
- *
objectId + *
objectId
*
8
*
bool
*
9
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java index e2b65aa7ff..95e63ac8f7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java @@ -24,8 +24,7 @@ * We recommend to use the static factory method {@link Aggregation#count()} instead of creating instances of this class * directly. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ * @author Mark Paluch * @since 1.10 */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index 029b994f2e..d1e45a8b93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -92,7 +92,7 @@ public static DateOperatorFactory zonedDateOf(AggregationExpression expression, /** * Take the given value as date. - *

+ *
* This can be one of: *