From 829eed7d6cad825b6e51eb7e6c1ac87d42a8d7d8 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 14 Apr 2021 14:30:11 +0200 Subject: [PATCH 001/983] Prepare next development iteration. See #3616 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index afb8a87e61..32a80b17ac 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0 + 3.3.0-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 43e83d8175..0033bd11d5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 8988a14c40..f62c8dc7f4 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index e36a1bb829..c1efaea420 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.2.0 + 3.3.0-SNAPSHOT ../pom.xml From 7b33f56e334e265f6fd153ed52b2d59e75ce8524 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 14 Apr 2021 14:30:14 +0200 Subject: [PATCH 002/983] After release cleanups. 
See #3616 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 32a80b17ac..548a846177 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.5.0 + 2.6.0-SNAPSHOT @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.5.0 + 2.6.0-SNAPSHOT 4.2.3 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-release - https://repo.spring.io/libs-release + spring-libs-snapshot + https://repo.spring.io/libs-snapshot sonatype-libs-snapshot From 54f75e653b935df29b9c3470521a33d7d9fb087c Mon Sep 17 00:00:00 2001 From: "Greg L. Turnquist" Date: Fri, 16 Apr 2021 12:27:26 -0500 Subject: [PATCH 003/983] Migrate to main branch. See #3616. --- CI.adoc | 2 +- CONTRIBUTING.adoc | 2 +- Jenkinsfile | 10 +++++----- README.adoc | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/CI.adoc b/CI.adoc index c6cb467f2b..4e95939a34 100644 --- a/CI.adoc +++ b/CI.adoc @@ -1,6 +1,6 @@ = Continuous Integration -image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Moore%20(master)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc index f007591467..740e8bd0bb 100644 --- a/CONTRIBUTING.adoc +++ b/CONTRIBUTING.adoc @@ -1,3 +1,3 @@ = Spring Data contribution guidelines -You find the contribution guidelines for Spring Data projects 
https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here]. +You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here]. diff --git a/Jenkinsfile b/Jenkinsfile index 570994b47a..b2e907e259 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -3,7 +3,7 @@ pipeline { triggers { pollSCM 'H/10 * * * *' - upstream(upstreamProjects: "spring-data-commons/master", threshold: hudson.model.Result.SUCCESS) + upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS) } options { @@ -68,7 +68,7 @@ pipeline { stage("test: baseline (jdk8)") { when { anyOf { - branch 'master' + branch 'main' not { triggeredBy 'UpstreamCause' } } } @@ -95,7 +95,7 @@ pipeline { stage("Test other configurations") { when { allOf { - branch 'master' + branch 'main' not { triggeredBy 'UpstreamCause' } } } @@ -168,7 +168,7 @@ pipeline { stage('Release to artifactory') { when { anyOf { - branch 'master' + branch 'main' not { triggeredBy 'UpstreamCause' } } } @@ -201,7 +201,7 @@ pipeline { stage('Publish documentation') { when { - branch 'master' + branch 'main' } agent { label 'data' diff --git a/README.adoc b/README.adoc index d6af80475b..e80d6ac4ef 100644 --- a/README.adoc +++ b/README.adoc @@ -1,6 +1,6 @@ image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://projects.spring.io/spring-data-mongodb#quick-start] -= Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmaster&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]] += Spring Data MongoDB 
image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] https://gitter.im/spring-projects/spring-data[image:https://badges.gitter.im/spring-projects/spring-data.svg[Gitter]] The primary goal of the https://projects.spring.io/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. From 5bd9bcca75bced820540cb3616e0c5997f4fb0b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Cl=C3=A9ment=20Petit?= Date: Sat, 17 Apr 2021 12:32:24 +0200 Subject: [PATCH 004/983] Fix bullet points in aggregation framework reference documentation. Closes: #3632 --- src/main/asciidoc/reference/mongodb.adoc | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index a463689bd8..7e0ef51328 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -2444,9 +2444,8 @@ A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the At runtime, field references get checked against the given input type, considering potential `@Field` annotations. [NOTE] ==== -Changed in 3.2 referencing none-xistent properties does no longer raise errors. To restore the previous behaviour use the `strictMapping` option of `AggregationOptions`. +Changed in 3.2 referencing non-existent properties does no longer raise errors. To restore the previous behaviour use the `strictMapping` option of `AggregationOptions`. ==== -+ * `AggregationDefinition` + An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. 
Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregate` class to construct an `AggregateOperation`. From 2edc29f758bf586e6de626732e9cfb2d3dd8db06 Mon Sep 17 00:00:00 2001 From: "Greg L. Turnquist" Date: Thu, 22 Apr 2021 14:59:20 -0500 Subject: [PATCH 005/983] Authenticate with artifactory. See #3616. --- Jenkinsfile | 24 ++++-- LICENSE.txt | 202 +++++++++++++++++++++++++++++++++++++++++++++++++++ pom.xml | 5 -- settings.xml | 29 ++++++++ 4 files changed, 249 insertions(+), 11 deletions(-) create mode 100644 LICENSE.txt create mode 100644 settings.xml diff --git a/Jenkinsfile b/Jenkinsfile index b2e907e259..6c2c3e17fb 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -76,6 +76,9 @@ pipeline { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + } steps { script { docker.withRegistry('', 'hub.docker.com-springbuildmaster') { @@ -85,7 +88,7 @@ pipeline { sh 'sleep 10' sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' sh 'sleep 15' - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' } } } @@ -105,6 +108,9 @@ pipeline { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + } steps { script { docker.withRegistry('', 'hub.docker.com-springbuildmaster') { @@ -114,7 +120,7 @@ pipeline { sh 'sleep 10' sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' sh 'sleep 15' - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins 
-Dsort -U -B' + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' } } } @@ -126,6 +132,9 @@ pipeline { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + } steps { script { docker.withRegistry('', 'hub.docker.com-springbuildmaster') { @@ -135,7 +144,7 @@ pipeline { sh 'sleep 10' sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' sh 'sleep 15' - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' } } } @@ -147,6 +156,9 @@ pipeline { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + } steps { script { docker.withRegistry('', 'hub.docker.com-springbuildmaster') { @@ -156,7 +168,7 @@ pipeline { sh 'sleep 10' sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' sh 'sleep 15' - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pjava11 clean dependency:list test -Duser.name=jenkins -Dsort -U -B' } } } @@ -185,7 +197,7 @@ pipeline { script { docker.withRegistry('', 'hub.docker.com-springbuildmaster') { docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,artifactory ' + + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml 
-Pci,artifactory ' + '-Dartifactory.server=https://repo.spring.io ' + "-Dartifactory.username=${ARTIFACTORY_USR} " + "-Dartifactory.password=${ARTIFACTORY_PSW} " + @@ -216,7 +228,7 @@ pipeline { script { docker.withRegistry('', 'hub.docker.com-springbuildmaster') { docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -Pci,distribute ' + + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' + '-Dartifactory.server=https://repo.spring.io ' + "-Dartifactory.username=${ARTIFACTORY_USR} " + "-Dartifactory.password=${ARTIFACTORY_PSW} " + diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..ff77379631 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/pom.xml b/pom.xml index 548a846177..a6d5da9170 100644 --- a/pom.xml +++ b/pom.xml @@ -158,11 +158,6 @@ spring-libs-milestone https://repo.spring.io/libs-milestone - - bintray-plugins - bintray-plugins - https://jcenter.bintray.com - diff --git a/settings.xml b/settings.xml new file mode 100644 index 0000000000..b3227cc110 --- /dev/null +++ b/settings.xml @@ -0,0 +1,29 @@ + + + + + spring-plugins-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-snapshot + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-milestone + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + + \ No newline at end of file From ede6927b652ed5d95381af61436bd50dbebff7af Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 27 Apr 2021 10:45:43 +0200 Subject: [PATCH 006/983] Introduce template method for easier customization of fragments. Closes #3638. 
--- .../support/MongoRepositoryFactory.java | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java index 699e7b158d..5a023b2b09 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactory.java @@ -39,7 +39,6 @@ import org.springframework.data.repository.core.RepositoryMetadata; import org.springframework.data.repository.core.support.RepositoryComposition.RepositoryFragments; import org.springframework.data.repository.core.support.RepositoryFactorySupport; -import org.springframework.data.repository.core.support.RepositoryFragment; import org.springframework.data.repository.query.QueryLookupStrategy; import org.springframework.data.repository.query.QueryLookupStrategy.Key; import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; @@ -92,8 +91,21 @@ protected Class getRepositoryBaseClass(RepositoryMetadata metadata) { */ @Override protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata) { + return getRepositoryFragments(metadata, operations); + } - RepositoryFragments fragments = RepositoryFragments.empty(); + /** + * Creates {@link RepositoryFragments} based on {@link RepositoryMetadata} to add Mongo-specific extensions. Typically + * adds a {@link QuerydslMongoPredicateExecutor} if the repository interface uses Querydsl. + *

+ * Can be overridden by subclasses to customize {@link RepositoryFragments}. + * + * @param metadata repository metadata. + * @param operations the MongoDB operations manager. + * @return + * @since 3.2.1 + */ + protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata, MongoOperations operations) { boolean isQueryDslRepository = QUERY_DSL_PRESENT && QuerydslPredicateExecutor.class.isAssignableFrom(metadata.getRepositoryInterface()); @@ -105,14 +117,11 @@ protected RepositoryFragments getRepositoryFragments(RepositoryMetadata metadata "Cannot combine Querydsl and reactive repository support in a single interface"); } - MongoEntityInformation entityInformation = getEntityInformation(metadata.getDomainType(), - metadata); - - fragments = fragments.append(RepositoryFragment.implemented( - getTargetRepositoryViaReflection(QuerydslMongoPredicateExecutor.class, entityInformation, operations))); + return RepositoryFragments + .just(new QuerydslMongoPredicateExecutor<>(getEntityInformation(metadata.getDomainType()), operations)); } - return fragments; + return RepositoryFragments.empty(); } /* From 9a48e32565f67593f57aa2287eaf36a09328fc08 Mon Sep 17 00:00:00 2001 From: divya_jnu08 Date: Thu, 6 May 2021 18:49:31 +0530 Subject: [PATCH 007/983] Aggregation query method should be able to return Slice and Stream. Aggregation query methods can not return Slice and Stream. interface PersonRepository extends CrudReppsitory { @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }") Slice groupByLastnameAnd(String property, Pageable page); @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }") Stream groupByLastnameAndFirstnamesAsStream(); } Closes #3543. Original pull request: #3645. 
--- .../repository/query/AggregationUtils.java | 22 +++++++++++++++ .../query/StringBasedAggregation.java | 27 +++++++++++++------ .../StringBasedAggregationUnitTests.java | 15 +++++------ 3 files changed, 48 insertions(+), 16 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java index b3cef1f6d9..a5a89cf9ce 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java @@ -145,6 +145,28 @@ static void appendLimitAndOffsetIfPresent(List aggregation aggregationPipeline.add(Aggregation.limit(pageable.getPageSize())); } + + /** + * Append {@code $skip} and {@code $limit} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is + * present. + * + * @param aggregationPipeline + * @param accessor + */ + static void appendModifiedLimitAndOffsetIfPresent(List aggregationPipeline, + ConvertingParameterAccessor accessor) { + + Pageable pageable = accessor.getPageable(); + if (pageable.isUnpaged()) { + return; + } + + if (pageable.getOffset() > 0) { + aggregationPipeline.add(Aggregation.skip(pageable.getOffset())); + } + + aggregationPipeline.add(Aggregation.limit(pageable.getPageSize()+1)); + } /** * Extract a single entry from the given {@link Document}.
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java index 4deb7d0d52..a4f8ed94bd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java @@ -20,6 +20,8 @@ import java.util.stream.Collectors; import org.bson.Document; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.SliceImpl; import org.springframework.data.mapping.model.SpELExpressionEvaluator; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; import org.springframework.data.mongodb.core.MongoOperations; @@ -76,18 +78,17 @@ public StringBasedAggregation(MongoQueryMethod method, MongoOperations mongoOper protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProcessor, ConvertingParameterAccessor accessor, Class typeToRead) { - if (method.isPageQuery() || method.isSliceQuery()) { - throw new InvalidMongoDbApiUsageException(String.format( - "Repository aggregation method '%s' does not support '%s' return type. Please use eg. 
'List' instead.", - method.getName(), method.getReturnType().getType().getSimpleName())); - } - Class sourceType = method.getDomainClass(); Class targetType = typeToRead; List pipeline = computePipeline(method, accessor); AggregationUtils.appendSortIfPresent(pipeline, accessor, typeToRead); - AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor); + + if (method.isSliceQuery()) { + AggregationUtils.appendModifiedLimitAndOffsetIfPresent(pipeline, accessor); + }else{ + AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor); + } boolean isSimpleReturnType = isSimpleReturnType(typeToRead); boolean isRawAggregationResult = ClassUtils.isAssignable(AggregationResults.class, typeToRead); @@ -118,7 +119,17 @@ protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProces return result.getMappedResults(); } - + + List mappedResults = result.getMappedResults(); + + if(method.isSliceQuery()) { + + Pageable pageable = accessor.getPageable(); + int pageSize = pageable.getPageSize(); + boolean hasNext = mappedResults.size() > pageSize; + return new SliceImpl(hasNext ? 
mappedResults.subList(0, pageSize) : mappedResults, pageable, hasNext); + } + Object uniqueResult = result.getUniqueMappedResult(); return isSimpleReturnType diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java index 00506229ea..9a5f058490 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java @@ -36,6 +36,8 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.SliceImpl; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; @@ -220,13 +222,10 @@ public void aggregateWithCollationParameter() { } @Test // DATAMONGO-2506 - public void aggregateRaisesErrorOnInvalidReturnType() { - - StringBasedAggregation sba = createAggregationForMethod("invalidPageReturnType", Pageable.class); - assertThatExceptionOfType(InvalidMongoDbApiUsageException.class) // - .isThrownBy(() -> sba.execute(new Object[] { PageRequest.of(0, 1) })) // - .withMessageContaining("invalidPageReturnType") // - .withMessageContaining("Page"); + public void aggregationWithSliceReturnType() { + StringBasedAggregation sba = createAggregationForMethod("aggregationWithSliceReturnType", Pageable.class); + Object result = sba.execute(new Object[] { PageRequest.of(0, 1) }); + assertThat(result.getClass()).isEqualTo(SliceImpl.class); } @Test // DATAMONGO-2557 @@ -319,7 +318,7 @@ private interface SampleRepository extends Repository { 
PersonAggregate aggregateWithCollation(Collation collation); @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) - Page invalidPageReturnType(Pageable page); + Slice aggregationWithSliceReturnType(Pageable page); @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) String simpleReturnType(); From 90d03d92d8f9ecd25b97a685f9ea0cfbfa7be23c Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 11 May 2021 11:47:36 +0200 Subject: [PATCH 008/983] Polishing. Let appendLimitAndOffsetIfPresent accept unary operators for adjusting limit/offset values instead of appendModifiedLimitAndOffsetIfPresent. Apply simple type extraction for Slice. Add support for aggregation result streaming. Extend tests, add author tags, update docs. See #3543. Original pull request: #3645. --- .../repository/query/AggregationUtils.java | 29 +++-- .../query/StringBasedAggregation.java | 72 +++++++++---- ...tractPersonRepositoryIntegrationTests.java | 26 +++-- .../mongodb/repository/PersonRepository.java | 6 ++ .../StringBasedAggregationUnitTests.java | 100 ++++++++++++++---- .../mongo-repositories-aggregation.adoc | 30 +++--- 6 files changed, 185 insertions(+), 78 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java index a5a89cf9ce..ebd07803c9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/AggregationUtils.java @@ -18,6 +18,8 @@ import java.time.Duration; import java.util.List; import java.util.Map; +import java.util.function.IntUnaryOperator; +import java.util.function.LongUnaryOperator; import org.bson.Document; import org.springframework.data.domain.Pageable; @@ -42,6 +44,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava * @since 2.2 */ 
abstract class AggregationUtils { @@ -133,28 +136,22 @@ static void appendSortIfPresent(List aggregationPipeline, */ static void appendLimitAndOffsetIfPresent(List aggregationPipeline, ConvertingParameterAccessor accessor) { - - Pageable pageable = accessor.getPageable(); - if (pageable.isUnpaged()) { - return; - } - - if (pageable.getOffset() > 0) { - aggregationPipeline.add(Aggregation.skip(pageable.getOffset())); - } - - aggregationPipeline.add(Aggregation.limit(pageable.getPageSize())); + appendLimitAndOffsetIfPresent(aggregationPipeline, accessor, LongUnaryOperator.identity(), + IntUnaryOperator.identity()); } - + /** * Append {@code $skip} and {@code $limit} aggregation stage if {@link ConvertingParameterAccessor#getSort()} is * present. * * @param aggregationPipeline * @param accessor + * @param offsetOperator + * @param limitOperator + * @since 3.3 */ - static void appendModifiedLimitAndOffsetIfPresent(List aggregationPipeline, - ConvertingParameterAccessor accessor) { + static void appendLimitAndOffsetIfPresent(List aggregationPipeline, + ConvertingParameterAccessor accessor, LongUnaryOperator offsetOperator, IntUnaryOperator limitOperator) { Pageable pageable = accessor.getPageable(); if (pageable.isUnpaged()) { @@ -162,10 +159,10 @@ static void appendModifiedLimitAndOffsetIfPresent(List agg } if (pageable.getOffset() > 0) { - aggregationPipeline.add(Aggregation.skip(pageable.getOffset())); + aggregationPipeline.add(Aggregation.skip(offsetOperator.applyAsLong(pageable.getOffset()))); } - aggregationPipeline.add(Aggregation.limit(pageable.getPageSize()+1)); + aggregationPipeline.add(Aggregation.limit(limitOperator.applyAsInt(pageable.getPageSize()))); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java index a4f8ed94bd..713ce308ad 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/query/StringBasedAggregation.java @@ -17,9 +17,11 @@ import java.util.ArrayList; import java.util.List; -import java.util.stream.Collectors; +import java.util.function.LongUnaryOperator; +import java.util.stream.Stream; import org.bson.Document; + import org.springframework.data.domain.Pageable; import org.springframework.data.domain.SliceImpl; import org.springframework.data.mapping.model.SpELExpressionEvaluator; @@ -42,7 +44,12 @@ import org.springframework.util.ClassUtils; /** + * {@link AbstractMongoQuery} implementation to run string-based aggregations using + * {@link org.springframework.data.mongodb.repository.Aggregation}. + * * @author Christoph Strobl + * @author Divya Srivastava + * @author Mark Paluch * @since 2.2 */ public class StringBasedAggregation extends AbstractMongoQuery { @@ -64,6 +71,12 @@ public StringBasedAggregation(MongoQueryMethod method, MongoOperations mongoOper ExpressionParser expressionParser, QueryMethodEvaluationContextProvider evaluationContextProvider) { super(method, mongoOperations, expressionParser, evaluationContextProvider); + if (method.isPageQuery()) { + throw new InvalidMongoDbApiUsageException(String.format( + "Repository aggregation method '%s' does not support '%s' return type. 
Please use 'Slice' or 'List' instead.", + method.getName(), method.getReturnType().getType().getSimpleName())); + } + this.mongoOperations = mongoOperations; this.mongoConverter = mongoOperations.getConverter(); this.expressionParser = expressionParser; @@ -83,10 +96,11 @@ protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProces List pipeline = computePipeline(method, accessor); AggregationUtils.appendSortIfPresent(pipeline, accessor, typeToRead); - + if (method.isSliceQuery()) { - AggregationUtils.appendModifiedLimitAndOffsetIfPresent(pipeline, accessor); - }else{ + AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor, LongUnaryOperator.identity(), + limit -> limit + 1); + } else { AggregationUtils.appendLimitAndOffsetIfPresent(pipeline, accessor); } @@ -96,40 +110,45 @@ protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProces if (isSimpleReturnType) { targetType = Document.class; } else if (isRawAggregationResult) { + + // 🙈 targetType = method.getReturnType().getRequiredActualType().getRequiredComponentType().getType(); } AggregationOptions options = computeOptions(method, accessor); TypedAggregation aggregation = new TypedAggregation<>(sourceType, pipeline, options); - AggregationResults result = mongoOperations.aggregate(aggregation, targetType); + if (method.isStreamQuery()) { + + Stream stream = mongoOperations.aggregateStream(aggregation, targetType).stream(); + + if (isSimpleReturnType) { + return stream.map(it -> AggregationUtils.extractSimpleTypeResult((Document) it, typeToRead, mongoConverter)); + } + + return stream; + } + + AggregationResults result = (AggregationResults) mongoOperations.aggregate(aggregation, targetType); if (isRawAggregationResult) { return result; } + List results = result.getMappedResults(); if (method.isCollectionQuery()) { + return isSimpleReturnType ? 
convertResults(typeToRead, results) : results; + } - if (isSimpleReturnType) { - - return result.getMappedResults().stream() - .map(it -> AggregationUtils.extractSimpleTypeResult((Document) it, typeToRead, mongoConverter)) - .collect(Collectors.toList()); - } + if (method.isSliceQuery()) { - return result.getMappedResults(); - } - - List mappedResults = result.getMappedResults(); - - if(method.isSliceQuery()) { - Pageable pageable = accessor.getPageable(); int pageSize = pageable.getPageSize(); - boolean hasNext = mappedResults.size() > pageSize; - return new SliceImpl(hasNext ? mappedResults.subList(0, pageSize) : mappedResults, pageable, hasNext); + List resultsToUse = isSimpleReturnType ? convertResults(typeToRead, results) : results; + boolean hasNext = resultsToUse.size() > pageSize; + return new SliceImpl<>(hasNext ? resultsToUse.subList(0, pageSize) : resultsToUse, pageable, hasNext); } - + Object uniqueResult = result.getUniqueMappedResult(); return isSimpleReturnType @@ -137,6 +156,17 @@ protected Object doExecute(MongoQueryMethod method, ResultProcessor resultProces : uniqueResult; } + private List convertResults(Class typeToRead, List mappedResults) { + + List list = new ArrayList<>(mappedResults.size()); + for (Object it : mappedResults) { + Object extractSimpleTypeResult = AggregationUtils.extractSimpleTypeResult((Document) it, typeToRead, + mongoConverter); + list.add(extractSimpleTypeResult); + } + return list; + } + private boolean isSimpleReturnType(Class targetType) { return MongoSimpleTypes.HOLDER.isSimpleType(targetType); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java index 74a48fc679..9ab37e3ff5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java 
+++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java @@ -43,6 +43,7 @@ import org.springframework.data.domain.Example; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Range; import org.springframework.data.domain.Slice; import org.springframework.data.domain.Sort; @@ -1269,13 +1270,16 @@ void annotatedQueryShouldAllowPositionalParameterInFieldsProjectionWithDbRef() { @Test // DATAMONGO-2153 void findListOfSingleValue() { - assertThat(repository.findAllLastnames()) // - .contains("Lessard") // - .contains("Keys") // - .contains("Tinsley") // - .contains("Beauford") // - .contains("Moore") // - .contains("Matthews"); // + assertThat(repository.findAllLastnames()).contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews"); + } + + @Test // GH-3543 + void findStreamOfSingleValue() { + + try (Stream lastnames = repository.findAllLastnamesAsStream()) { + assertThat(lastnames) // + .contains("Lessard", "Keys", "Tinsley", "Beauford", "Moore", "Matthews"); + } } @Test // DATAMONGO-2153 @@ -1290,6 +1294,14 @@ void annotatedAggregationWithPlaceholderValue() { .contains(new PersonAggregate("Matthews", Arrays.asList("Dave", "Oliver August"))); } + @Test // GH-3543 + void annotatedAggregationWithPlaceholderAsSlice() { + + Slice slice = repository.groupByLastnameAndAsSlice("firstname", Pageable.ofSize(5)); + assertThat(slice).hasSize(5); + assertThat(slice.hasNext()).isTrue(); + } + @Test // DATAMONGO-2153 void annotatedAggregationWithSort() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java index c3b765c910..314655e781 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java @@ -379,9 +379,15 @@ Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, Li @Aggregation("{ '$project': { '_id' : '$lastname' } }") List findAllLastnames(); + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + Stream findAllLastnamesAsStream(); + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") List groupByLastnameAnd(String property); + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Slice groupByLastnameAndAsSlice(String property, Pageable pageable); + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") List groupByLastnameAnd(String property, Sort sort); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java index 9a5f058490..995442f0fb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/StringBasedAggregationUnitTests.java @@ -26,6 +26,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.stream.Stream; import org.bson.Document; import org.junit.jupiter.api.BeforeEach; @@ -36,11 +37,12 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; -import org.springframework.data.domain.Slice; -import org.springframework.data.domain.SliceImpl; + import org.springframework.data.domain.Page; import 
org.springframework.data.domain.PageRequest; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.SliceImpl; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.InvalidMongoDbApiUsageException; @@ -64,6 +66,7 @@ import org.springframework.data.repository.Repository; import org.springframework.data.repository.core.support.DefaultRepositoryMetadata; import org.springframework.data.repository.query.QueryMethodEvaluationContextProvider; +import org.springframework.data.util.CloseableIterator; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; @@ -75,17 +78,18 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava */ @ExtendWith(MockitoExtension.class) @MockitoSettings(strictness = Strictness.LENIENT) public class StringBasedAggregationUnitTests { - SpelExpressionParser PARSER = new SpelExpressionParser(); + private SpelExpressionParser PARSER = new SpelExpressionParser(); @Mock MongoOperations operations; @Mock DbRefResolver dbRefResolver; @Mock AggregationResults aggregationResults; - MongoConverter converter; + private MongoConverter converter; private static final String RAW_SORT_STRING = "{ '$sort' : { 'lastname' : -1 } }"; private static final String RAW_GROUP_BY_LASTNAME_STRING = "{ '$group': { '_id' : '$lastname', 'names' : { '$addToSet' : '$firstname' } } }"; @@ -96,7 +100,7 @@ public class StringBasedAggregationUnitTests { private static final Document GROUP_BY_LASTNAME = Document.parse(RAW_GROUP_BY_LASTNAME_STRING); @BeforeEach - public void setUp() { + void setUp() { converter = new MappingMongoConverter(dbRefResolver, new MongoMappingContext()); when(operations.getConverter()).thenReturn(converter); @@ -105,7 +109,7 @@ public void setUp() { } @Test // 
DATAMONGO-2153 - public void plainStringAggregation() { + void plainStringAggregation() { AggregationInvocation invocation = executeAggregation("plainStringAggregation"); @@ -115,7 +119,7 @@ public void plainStringAggregation() { } @Test // DATAMONGO-2153, DATAMONGO-2449 - public void plainStringAggregationConsidersMeta() { + void plainStringAggregationConsidersMeta() { AggregationInvocation invocation = executeAggregation("plainStringAggregation"); AggregationOptions options = invocation.aggregation.getOptions(); @@ -127,7 +131,7 @@ public void plainStringAggregationConsidersMeta() { } @Test // DATAMONGO-2153, DATAMONGO-2449 - public void returnSingleObject() { + void returnSingleObject() { PersonAggregate expected = new PersonAggregate(); when(aggregationResults.getUniqueMappedResult()).thenReturn(Collections.singletonList(expected)); @@ -144,7 +148,7 @@ public void returnSingleObject() { } @Test // DATAMONGO-2153 - public void returnSingleObjectThrowsError() { + void returnSingleObjectThrowsError() { when(aggregationResults.getUniqueMappedResult()).thenThrow(new IllegalArgumentException("o_O")); @@ -153,7 +157,7 @@ public void returnSingleObjectThrowsError() { } @Test // DATAMONGO-2153 - public void returnCollection() { + void returnCollection() { List expected = Collections.singletonList(new PersonAggregate()); when(aggregationResults.getMappedResults()).thenReturn(expected); @@ -162,7 +166,7 @@ public void returnCollection() { } @Test // GH-3623 - public void returnNullWhenSingleResultIsNotPresent() { + void returnNullWhenSingleResultIsNotPresent() { when(aggregationResults.getMappedResults()).thenReturn(Collections.emptyList()); @@ -170,12 +174,12 @@ public void returnNullWhenSingleResultIsNotPresent() { } @Test // DATAMONGO-2153 - public void returnRawResultType() { + void returnRawResultType() { assertThat(executeAggregation("returnRawResultType").result).isEqualTo(aggregationResults); } @Test // DATAMONGO-2153 - public void 
plainStringAggregationWithSortParameter() { + void plainStringAggregationWithSortParameter() { AggregationInvocation invocation = executeAggregation("plainStringAggregation", Sort.by(Direction.DESC, "lastname")); @@ -186,7 +190,7 @@ public void plainStringAggregationWithSortParameter() { } @Test // DATAMONGO-2153 - public void replaceParameter() { + void replaceParameter() { AggregationInvocation invocation = executeAggregation("parameterReplacementAggregation", "firstname"); @@ -196,7 +200,7 @@ public void replaceParameter() { } @Test // DATAMONGO-2153 - public void replaceSpElParameter() { + void replaceSpElParameter() { AggregationInvocation invocation = executeAggregation("spelParameterReplacementAggregation", "firstname"); @@ -206,7 +210,7 @@ public void replaceSpElParameter() { } @Test // DATAMONGO-2153 - public void aggregateWithCollation() { + void aggregateWithCollation() { AggregationInvocation invocation = executeAggregation("aggregateWithCollation"); @@ -214,18 +218,48 @@ public void aggregateWithCollation() { } @Test // DATAMONGO-2153 - public void aggregateWithCollationParameter() { + void aggregateWithCollationParameter() { AggregationInvocation invocation = executeAggregation("aggregateWithCollation", Collation.of("en_US")); assertThat(collationOf(invocation)).isEqualTo(Collation.of("en_US")); } - @Test // DATAMONGO-2506 - public void aggregationWithSliceReturnType() { + @Test // GH-3543 + void aggregationWithSliceReturnType() { + StringBasedAggregation sba = createAggregationForMethod("aggregationWithSliceReturnType", Pageable.class); + Object result = sba.execute(new Object[] { PageRequest.of(0, 1) }); - assertThat(result.getClass()).isEqualTo(SliceImpl.class); + + assertThat(result).isInstanceOf(Slice.class); + } + + @Test // GH-3543 + void aggregationWithStreamReturnType() { + + when(operations.aggregateStream(any(TypedAggregation.class), any())).thenReturn(new CloseableIterator() { + @Override + public void close() { + + } + + @Override + 
public boolean hasNext() { + return false; + } + + @Override + public Object next() { + return null; + } + }); + + StringBasedAggregation sba = createAggregationForMethod("aggregationWithStreamReturnType", Pageable.class); + + Object result = sba.execute(new Object[] { PageRequest.of(0, 1) }); + + assertThat(result).isInstanceOf(Stream.class); } @Test // DATAMONGO-2557 @@ -235,6 +269,21 @@ void aggregationRetrievesCodecFromDriverJustOnceForMultipleAggregationOperations verify(operations).execute(any()); } + @Test // DATAMONGO-2506 + void aggregateRaisesErrorOnInvalidReturnType() { + + Method method = ClassUtils.getMethod(UnsupportedRepository.class, "pageIsUnsupported", Pageable.class); + ProjectionFactory factory = new SpelAwareProxyProjectionFactory(); + MongoQueryMethod queryMethod = new MongoQueryMethod(method, new DefaultRepositoryMetadata(SampleRepository.class), + factory, converter.getMappingContext()); + + assertThatExceptionOfType(InvalidMongoDbApiUsageException.class) // + .isThrownBy(() -> new StringBasedAggregation(queryMethod, operations, PARSER, + QueryMethodEvaluationContextProvider.DEFAULT)) // + .withMessageContaining("pageIsUnsupported") // + .withMessageContaining("Page"); + } + private AggregationInvocation executeAggregation(String name, Object... 
args) { Class[] argTypes = Arrays.stream(args).map(Object::getClass).toArray(Class[]::new); @@ -320,16 +369,25 @@ private interface SampleRepository extends Repository { @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) Slice aggregationWithSliceReturnType(Pageable page); + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Stream aggregationWithStreamReturnType(Pageable page); + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) String simpleReturnType(); } + private interface UnsupportedRepository extends Repository { + + @Aggregation(RAW_GROUP_BY_LASTNAME_STRING) + Page pageIsUnsupported(Pageable page); + } + static class PersonAggregate { } @Value - static class AggregationInvocation { + private static class AggregationInvocation { TypedAggregation aggregation; Class targetType; diff --git a/src/main/asciidoc/reference/mongo-repositories-aggregation.adoc b/src/main/asciidoc/reference/mongo-repositories-aggregation.adoc index 6342e9ae48..83e0624be8 100644 --- a/src/main/asciidoc/reference/mongo-repositories-aggregation.adoc +++ b/src/main/asciidoc/reference/mongo-repositories-aggregation.adoc @@ -21,19 +21,22 @@ public interface PersonRepository extends CrudReppsitory { List groupByLastnameAnd(String property); <3> @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : ?0 } } }") - List groupByLastnameAnd(String property, Pageable page); <4> + Slice groupByLastnameAnd(String property, Pageable page); <4> + + @Aggregation("{ $group: { _id : $lastname, names : { $addToSet : $firstname } } }") + Stream groupByLastnameAndFirstnamesAsStream(); <5> @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }") - SumValue sumAgeUsingValueWrapper(); <5> + SumValue sumAgeUsingValueWrapper(); <6> @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }") - Long sumAge(); <6> + Long sumAge(); <7> @Aggregation("{ $group : { _id : null, total : { $sum : $age } } }") - AggregationResults sumAgeRaw(); <7> + AggregationResults sumAgeRaw(); <8> @Aggregation("{ '$project': { 
'_id' : '$lastname' } }") - List findAllLastnames(); <8> + List findAllLastnames(); <9> } ---- [source,java] @@ -52,7 +55,7 @@ public class PersonAggregate { public class SumValue { - private final Long total; <5> <7> + private final Long total; <6> <8> public SumValue(Long total) { // ... @@ -65,12 +68,13 @@ public class SumValue { <2> If `Sort` argument is present, `$sort` is appended after the declared pipeline stages so that it only affects the order of the final results after having passed all other aggregation stages. Therefore, the `Sort` properties are mapped against the methods return type `PersonAggregate` which turns `Sort.by("lastname")` into `{ $sort : { '_id', 1 } }` because `PersonAggregate.lastname` is annotated with `@Id`. <3> Replaces `?0` with the given value for `property` for a dynamic aggregation pipeline. -<4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. -<5> Map the result of an aggregation returning a single `Document` to an instance of a desired `SumValue` target type. -<6> Aggregations resulting in single document holding just an accumulation result like eg. `$sum` can be extracted directly from the result `Document`. +<4> `$skip`, `$limit` and `$sort` can be passed on via a `Pageable` argument. Same as in <2>, the operators are appended to the pipeline definition. Methods accepting `Pageable` can return `Slice` for easier pagination. +<5> Aggregation methods can return `Stream` to consume results directly from an underlying cursor. Make sure to close the stream after consuming it to release the server-side cursor by either calling `close()` or through `try-with-resources`. +<6> Map the result of an aggregation returning a single `Document` to an instance of a desired `SumValue` target type. +<7> Aggregations resulting in single document holding just an accumulation result like eg. 
`$sum` can be extracted directly from the result `Document`. To gain more control, you might consider `AggregationResult` as method return type as shown in <7>. -<7> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`. -<8> Like in <6>, a single value can be directly obtained from multiple result ``Document``s. +<8> Obtain the raw `AggregationResults` mapped to the generic target wrapper type `SumValue` or `org.bson.Document`. +<9> Like in <6>, a single value can be directly obtained from multiple result ``Document``s. ==== In some scenarios, aggregations might require additional options, such as a maximum run time, additional log comments, or the permission to temporarily write data to disk. @@ -115,5 +119,5 @@ Simple-type single-result inspects the returned `Document` and checks for the fo . Throw an exception if none of the above is applicable. ==== -WARNING: The `Page` return type is not supported for repository methods using `@Aggregation`. However you can use a -`Pageable` argument to add `$skip`, `$limit` and `$sort` to the pipeline. +WARNING: The `Page` return type is not supported for repository methods using `@Aggregation`. However, you can use a +`Pageable` argument to add `$skip`, `$limit` and `$sort` to the pipeline and let the method return `Slice`. From eec6cea507fbadea492ce5c0015f4a3c27d1d088 Mon Sep 17 00:00:00 2001 From: "Greg L. Turnquist" Date: Thu, 13 May 2021 15:52:30 -0500 Subject: [PATCH 009/983] Update CI to JDK 16. See #3603. 
--- Jenkinsfile | 10 +++++----- .../Dockerfile | 0 2 files changed, 5 insertions(+), 5 deletions(-) rename ci/{openjdk15-mongodb-4.4 => openjdk16-mongodb-4.4}/Dockerfile (100%) diff --git a/Jenkinsfile b/Jenkinsfile index 6c2c3e17fb..1eb84755a5 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -46,16 +46,16 @@ pipeline { } } } - stage('Publish JDK 15 + MongoDB 4.4') { + stage('Publish JDK 16 + MongoDB 4.4') { when { - changeset "ci/openjdk15-mongodb-4.4/**" + changeset "ci/openjdk16-mongodb-4.4/**" } agent { label 'data' } options { timeout(time: 30, unit: 'MINUTES') } steps { script { - def image = docker.build("springci/spring-data-openjdk15-with-mongodb-4.4.4", "ci/openjdk15-mongodb-4.4/") + def image = docker.build("springci/spring-data-openjdk16-with-mongodb-4.4.4", "ci/openjdk16-mongodb-4.4/") docker.withRegistry('', 'hub.docker.com-springbuildmaster') { image.push() } @@ -151,7 +151,7 @@ pipeline { } } - stage("test: baseline (jdk15)") { + stage("test: baseline (jdk16)") { agent { label 'data' } @@ -162,7 +162,7 @@ pipeline { steps { script { docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('springci/spring-data-openjdk15-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') { + docker.image('springci/spring-data-openjdk16-with-mongodb-4.4.4:latest').inside('-v $HOME:/tmp/jenkins-home') { sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log' sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &' sh 'sleep 10' diff --git a/ci/openjdk15-mongodb-4.4/Dockerfile b/ci/openjdk16-mongodb-4.4/Dockerfile similarity index 100% rename from ci/openjdk15-mongodb-4.4/Dockerfile rename to ci/openjdk16-mongodb-4.4/Dockerfile From 2ee33b144476548e149fc2c92bff7c12d3b0daba Mon Sep 17 00:00:00 2001 From: "Greg L. Turnquist" Date: Thu, 13 May 2021 15:56:39 -0500 Subject: [PATCH 010/983] Polishing. 
--- ci/openjdk16-mongodb-4.4/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/openjdk16-mongodb-4.4/Dockerfile b/ci/openjdk16-mongodb-4.4/Dockerfile index 77e32d4c97..30ea554cad 100644 --- a/ci/openjdk16-mongodb-4.4/Dockerfile +++ b/ci/openjdk16-mongodb-4.4/Dockerfile @@ -1,4 +1,4 @@ -FROM adoptopenjdk/openjdk15:latest +FROM adoptopenjdk/openjdk16:latest ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive From 80c5b536df975b411f53f8a70e7ac64210f1fe58 Mon Sep 17 00:00:00 2001 From: "Greg L. Turnquist" Date: Thu, 13 May 2021 16:15:04 -0500 Subject: [PATCH 011/983] Polishing. --- ci/openjdk16-mongodb-4.4/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/openjdk16-mongodb-4.4/Dockerfile b/ci/openjdk16-mongodb-4.4/Dockerfile index 30ea554cad..7a1e47cf00 100644 --- a/ci/openjdk16-mongodb-4.4/Dockerfile +++ b/ci/openjdk16-mongodb-4.4/Dockerfile @@ -1,4 +1,4 @@ -FROM adoptopenjdk/openjdk16:latest +FROM adoptopenjdk/openjdk16:latest ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive From 124036fe366cc00594c2096d8e73d38e70369273 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 14 May 2021 11:51:46 +0200 Subject: [PATCH 012/983] Updated changelog. See #3628 --- src/main/resources/changelog.txt | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index e7b34bdd63..16dd5d244b 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,10 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.1.9 (2021-05-14) +------------------------------------- + + Changes in version 3.2.0 (2021-04-14) ------------------------------------- * #3623 - `@Aggregation` repository query method causes `NullPointerException` when the result is empty. 
@@ -3430,5 +3434,6 @@ Repository + From ff7588f6489ce83e6846ec2e50fa3d64c724b247 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 14 May 2021 12:23:14 +0200 Subject: [PATCH 013/983] Updated changelog. See #3629 --- src/main/resources/changelog.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index 16dd5d244b..3de91d4a95 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,12 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.2.1 (2021-05-14) +------------------------------------- +* #3638 - Introduce template method for easier customization of fragments. +* #3632 - Fix bullet points in aggregations framework asciidoc. + + Changes in version 3.1.9 (2021-05-14) ------------------------------------- @@ -3435,5 +3441,6 @@ Repository + From f1354c45084323daa6459b23bebb5209fc769003 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 18 May 2021 15:11:06 +0200 Subject: [PATCH 014/983] Revise DocumentCallback nullability constraints. DocumentCallback is now generally non-nullable for both, the input Document and the returned result expecting EntityReader to always return a non-null object. Also, use try-with-resources where applicable. 
Closes #3648 --- .../data/mongodb/core/MongoTemplate.java | 106 +++++++----------- .../mongodb/core/ReactiveMongoTemplate.java | 38 ++++--- .../core/convert/MappingMongoConverter.java | 2 +- .../mongodb/core/MongoTemplateUnitTests.java | 20 +++- .../core/ReactiveMongoTemplateUnitTests.java | 15 +++ 5 files changed, 97 insertions(+), 84 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index d8ac2d0a03..eae4f42706 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -28,6 +28,7 @@ import org.bson.conversions.Bson; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -46,6 +47,7 @@ import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Metric; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.MongoDatabaseFactory; @@ -102,7 +104,6 @@ import org.springframework.data.projection.SpelAwareProxyProjectionFactory; import org.springframework.data.util.CloseableIterator; import org.springframework.data.util.Optionals; -import org.springframework.jca.cci.core.ConnectionCallback; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -972,7 +973,7 @@ public GeoResults geoNear(NearQuery near, Class domainType, String col for (Document element : results) { GeoResult geoResult = 
callback.doWith(element); - aggregate = aggregate.add(new BigDecimal(geoResult.getDistance().getValue())); + aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue())); result.add(geoResult); } @@ -2751,25 +2752,24 @@ private MongoCollection getAndPrepareCollection(MongoDatabase db, Stri * Internal method using callbacks to do queries against the datastore that requires reading a single object from a * collection of objects. It will take the following steps *
    - *
  1. Execute the given {@link ConnectionCallback} for a {@link Document}.
  2. + *
  3. Execute the given {@link CollectionCallback} for a {@link Document}.
  4. *
  5. Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.
  6. *
      * * @param * @param collectionCallback the callback to retrieve the {@link Document} with - * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ @Nullable private T executeFindOneInternal(CollectionCallback collectionCallback, - DocumentCallback objectCallback, String collectionName) { + DocumentCallback documentCallback, String collectionName) { try { - T result = objectCallback - .doWith(collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName))); - return result; + Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)); + return document != null ? documentCallback.doWith(document) : null; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -2779,7 +2779,7 @@ private T executeFindOneInternal(CollectionCallback collectionCall * Internal method using callback to do queries against the datastore that requires reading a collection of objects. * It will take the following steps *
        - *
      1. Execute the given {@link ConnectionCallback} for a {@link FindIterable}.
      2. + *
      3. Execute the given {@link CollectionCallback} for a {@link FindIterable}.
      4. *
      5. Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if * {@link CursorPreparer} is {@literal null}
      6. *
      7. Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the @@ -2789,36 +2789,27 @@ private T executeFindOneInternal(CollectionCallback collectionCall * @param * @param collectionCallback the callback to retrieve the {@link FindIterable} with * @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it - * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ private List executeFindMultiInternal(CollectionCallback> collectionCallback, - CursorPreparer preparer, DocumentCallback objectCallback, String collectionName) { + CursorPreparer preparer, DocumentCallback documentCallback, String collectionName) { try { - MongoCursor cursor = null; - - try { - - cursor = preparer - .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) - .iterator(); + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { List result = new ArrayList<>(); while (cursor.hasNext()) { Document object = cursor.next(); - result.add(objectCallback.doWith(object)); + result.add(documentCallback.doWith(object)); } return result; - } finally { - - if (cursor != null) { - cursor.close(); - } } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); @@ -2828,24 +2819,13 @@ private List executeFindMultiInternal(CollectionCallback> collectionCallback, CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) { - try { - - MongoCursor cursor = null; - - try { - - cursor = preparer - .initiateFind(getAndPrepareCollection(doGetDatabase(), 
collectionName), collectionCallback::doInCollection) - .iterator(); + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { while (cursor.hasNext()) { callbackHandler.processDocument(cursor.next()); } - } finally { - if (cursor != null) { - cursor.close(); - } - } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -3143,8 +3123,7 @@ public Document doInCollection(MongoCollection collection) throws Mong interface DocumentCallback { - @Nullable - T doWith(@Nullable Document object); + T doWith(Document object); } /** @@ -3168,22 +3147,19 @@ private class ReadDocumentCallback implements DocumentCallback { this.collectionName = collectionName; } - @Nullable - public T doWith(@Nullable Document document) { - - T source = null; + public T doWith(Document document) { - if (document != null) { maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); - source = reader.read(type, document); - } + T entity = reader.read(type, document); - if (source != null) { - maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName)); - source = maybeCallAfterConvert(source, document, collectionName); - } + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); + } - return source; + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + entity = maybeCallAfterConvert(entity, document, collectionName); + + return entity; } } @@ -3216,8 +3192,7 @@ private class ProjectingReadCallback implements DocumentCallback { * @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document) */ @SuppressWarnings("unchecked") - @Nullable - public T doWith(@Nullable Document document) { + public T doWith(Document document) { if (document == null) { return null; @@ -3228,15 +3203,16 @@ public T doWith(@Nullable 
Document document) { maybeEmitEvent(new AfterLoadEvent<>(document, targetType, collectionName)); - Object source = reader.read(typeToRead, document); - Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source; + Object entity = reader.read(typeToRead, document); - if (result != null) { - maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName)); - result = maybeCallAfterConvert(result, document, collectionName); + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return (T) result; + Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity; + + maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName)); + return (T) maybeCallAfterConvert(result, document, collectionName); } } @@ -3373,8 +3349,7 @@ static class GeoNearResultDocumentCallback implements DocumentCallback doWith(@Nullable Document object) { + public GeoResult doWith(Document object) { double distance = Double.NaN; if (object.containsKey(distanceField)) { @@ -3401,10 +3376,6 @@ static class CloseableIterableCursorAdapter implements CloseableIterator { /** * Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}. 
- * - * @param cursor - * @param exceptionTranslator - * @param objectReadCallback */ CloseableIterableCursorAdapter(MongoIterable cursor, PersistenceExceptionTranslator exceptionTranslator, DocumentCallback objectReadCallback) { @@ -3448,8 +3419,7 @@ public T next() { try { Document item = cursor.next(); - T converted = objectReadCallback.doWith(item); - return converted; + return objectReadCallback.doWith(item); } catch (RuntimeException ex) { throw potentiallyConvertRuntimeException(ex, exceptionTranslator); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index d6743db9d0..614894f3b6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -22,7 +22,15 @@ import reactor.util.function.Tuple2; import reactor.util.function.Tuples; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Function; @@ -36,6 +44,7 @@ import org.reactivestreams.Subscriber; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -51,6 +60,7 @@ import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Metric; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentEntity; 
import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; import org.springframework.data.mapping.context.MappingContext; @@ -3152,13 +3162,14 @@ public Mono doWith(Document document) { maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); - T source = reader.read(type, document); - if (source != null) { - maybeEmitEvent(new AfterConvertEvent<>(document, source, collectionName)); - return maybeCallAfterConvert(source, document, collectionName); + T entity = reader.read(type, document); + + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return Mono.empty(); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return maybeCallAfterConvert(entity, document, collectionName); } } @@ -3196,16 +3207,17 @@ public Mono doWith(Document document) { maybeEmitEvent(new AfterLoadEvent<>(document, typeToRead, collectionName)); - Object source = reader.read(typeToRead, document); - Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source; + Object entity = reader.read(typeToRead, document); - T castEntity = (T) result; - if (castEntity != null) { - maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName)); - return maybeCallAfterConvert(castEntity, document, collectionName); + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return Mono.empty(); + Object result = targetType.isInterface() ? 
projectionFactory.createProjection(targetType, entity) : entity; + + T castEntity = (T) result; + maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName)); + return maybeCallAfterConvert(castEntity, document, collectionName); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 4a2a7fc152..74d189b4c5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -270,7 +270,7 @@ public void setEntityCallbacks(EntityCallbacks entityCallbacks) { * (non-Javadoc) * @see org.springframework.data.mongodb.core.core.MongoReader#read(java.lang.Class, com.mongodb.Document) */ - public S read(Class clazz, final Bson bson) { + public S read(Class clazz, Bson bson) { return read(ClassTypeInformation.from(clazz), bson); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 150ba5a861..f8170889b6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -50,6 +50,7 @@ import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; + import org.springframework.beans.factory.annotation.Value; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationListener; @@ -64,8 +65,10 @@ import org.springframework.data.convert.CustomConversions; import org.springframework.data.domain.Sort; import 
org.springframework.data.geo.Point; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.context.InvalidPersistentPropertyPath; +import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.aggregation.*; import org.springframework.data.mongodb.core.aggregation.ComparisonOperators.Gte; @@ -394,11 +397,24 @@ void findAllAndRemoveShouldRetrieveMatchingDocumentsPriorToRemoval() { verify(collection, times(1)).find(Mockito.eq(query.getQueryObject()), any(Class.class)); } + @Test // GH-3648 + void shouldThrowExceptionIfEntityReaderReturnsNull() { + + when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false); + when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0))); + MappingMongoConverter converter = mock(MappingMongoConverter.class); + when(converter.getMappingContext()).thenReturn((MappingContext) mappingContext); + template = new MongoTemplate(factory, converter); + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> template.findAll(Person.class)) + .withMessageContaining("returned null"); + } + @Test // DATAMONGO-566 void findAllAndRemoveShouldRemoveDocumentsReturedByFindQuery() { - Mockito.when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false); - Mockito.when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0))) + when(cursor.hasNext()).thenReturn(true).thenReturn(true).thenReturn(false); + when(cursor.next()).thenReturn(new org.bson.Document("_id", Integer.valueOf(0))) .thenReturn(new org.bson.Document("_id", Integer.valueOf(1))); ArgumentCaptor queryCaptor = ArgumentCaptor.forClass(org.bson.Document.class); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index eb44de349f..931ea75cea 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -57,7 +57,9 @@ import org.springframework.context.ApplicationListener; import org.springframework.context.support.StaticApplicationContext; import org.springframework.data.annotation.Id; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.MongoTemplateUnitTests.AutogenerateableId; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; @@ -1137,6 +1139,19 @@ void saveVersionedShouldProjectOnShardKeyWhenLoadingExistingDocument() { verify(findPublisher).projection(new Document("country", 1).append("userid", 1)); } + @Test // GH-3648 + void shouldThrowExceptionIfEntityReaderReturnsNull() { + + MappingMongoConverter converter = mock(MappingMongoConverter.class); + when(converter.getMappingContext()).thenReturn((MappingContext) mappingContext); + template = new ReactiveMongoTemplate(factory, converter); + + when(collection.find(Document.class)).thenReturn(findPublisher); + stubFindSubscribe(new Document()); + + template.find(new Query(), Person.class).as(StepVerifier::create).verifyError(MappingException.class); + } + @Test // DATAMONGO-2479 void findShouldInvokeAfterConvertCallbacks() { From a51c96298ffe35e3aeb49d69126bcb8be236277a Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 5 Mar 2021 07:27:02 +0100 Subject: [PATCH 015/983] Enhance support for linking entities. 
Add initial support for an alternative to the existing DBRef scenario. The enhancement allows to store and retrieve linked entites via their id or a customizable lookup query. Original pull request: #3647. Closes #3602. --- .../mongodb/core/convert/DbRefResolver.java | 2 +- .../core/convert/DefaultDbRefResolver.java | 29 +- .../core/convert/DefaultReferenceLoader.java | 71 ++ .../convert/DefaultReferenceResolver.java | 69 ++ .../convert/LazyLoadingProxyGenerator.java | 253 +++++++ .../core/convert/MappingMongoConverter.java | 88 ++- .../core/convert/NoOpDbRefResolver.java | 16 + .../mongodb/core/convert/ReferenceLoader.java | 79 +++ .../mongodb/core/convert/ReferenceReader.java | 350 ++++++++++ .../core/convert/ReferenceResolver.java | 74 ++ .../core/mapping/DocumentReference.java | 50 ++ .../mongodb/core/mapping/ObjectReference.java | 24 + .../MongoTemplateDocumentReferenceTests.java | 649 ++++++++++++++++++ .../DbRefMappingMongoConverterUnitTests.java | 2 + .../DefaultDbRefResolverUnitTests.java | 7 +- .../core/convert/LazyLoadingTestUtils.java | 30 + .../performance/ReactivePerformanceTests.java | 20 + .../src/test/resources/logback.xml | 1 + 18 files changed, 1779 insertions(+), 35 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java create mode 100644 
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java index da26f4cce6..f482ae0f1c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DbRefResolver.java @@ -35,7 +35,7 @@ * @author Mark Paluch * @since 1.4 */ -public interface DbRefResolver { +public interface DbRefResolver extends ReferenceResolver { /** * Resolves the given {@link DBRef} into an object of the given {@link MongoPersistentProperty}'s type. 
The method diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java index 8b6674460c..96b6c6876b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java @@ -46,6 +46,7 @@ import org.springframework.data.mongodb.LazyLoadingException; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; import org.springframework.objenesis.ObjenesisStd; @@ -67,7 +68,7 @@ * @author Mark Paluch * @since 1.4 */ -public class DefaultDbRefResolver implements DbRefResolver { +public class DefaultDbRefResolver extends DefaultReferenceResolver implements DbRefResolver, ReferenceResolver { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDbRefResolver.class); @@ -82,6 +83,8 @@ public class DefaultDbRefResolver implements DbRefResolver { */ public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) { + super(new DefaultReferenceLoader(mongoDbFactory)); + Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); this.mongoDbFactory = mongoDbFactory; @@ -114,17 +117,7 @@ public Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbr */ @Override public Document fetch(DBRef dbRef) { - - MongoCollection mongoCollection = getCollection(dbRef); - - if (LOGGER.isTraceEnabled()) { - LOGGER.trace("Fetching DBRef '{}' from {}.{}.", dbRef.getId(), - StringUtils.hasText(dbRef.getDatabaseName()) ? 
dbRef.getDatabaseName() - : mongoCollection.getNamespace().getDatabaseName(), - dbRef.getCollectionName()); - } - - return mongoCollection.find(Filters.eq("_id", dbRef.getId())).first(); + return getReferenceLoader().fetch(ReferenceFilter.singleReferenceFilter(Filters.eq("_id", dbRef.getId())), ReferenceContext.fromDBRef(dbRef)); } /* @@ -164,9 +157,9 @@ public List bulkFetch(List refs) { databaseSource.getCollectionName()); } - List result = mongoCollection // - .find(new Document("_id", new Document("$in", ids))) // - .into(new ArrayList<>()); + List result = getReferenceLoader() + .bulkFetch(ReferenceFilter.referenceFilter(new Document("_id", new Document("$in", ids))), ReferenceContext.fromDBRef(refs.iterator().next())) + .collect(Collectors.toList()); return ids.stream() // .flatMap(id -> documentWithId(id, result)) // @@ -504,4 +497,10 @@ protected MongoCollection getCollection(DBRef dbref) { return MongoDatabaseUtils.getDatabase(dbref.getDatabaseName(), mongoDbFactory) .getCollection(dbref.getCollectionName(), Document.class); } + + protected MongoCollection getCollection(ReferenceContext context) { + + return MongoDatabaseUtils.getDatabase(context.database, mongoDbFactory).getCollection(context.collection, + Document.class); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java new file mode 100644 index 0000000000..27feca163d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java @@ -0,0 +1,71 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.MongoDatabaseUtils; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +public class DefaultReferenceLoader implements ReferenceLoader { + + private static final Logger LOGGER = LoggerFactory.getLogger(DefaultReferenceLoader.class); + + private final MongoDatabaseFactory mongoDbFactory; + + public DefaultReferenceLoader(MongoDatabaseFactory mongoDbFactory) { + + Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); + + this.mongoDbFactory = mongoDbFactory; + } + + @Override + public Stream bulkFetch(ReferenceFilter filter, ReferenceContext context) { + + MongoCollection collection = getCollection(context); + + if (LOGGER.isTraceEnabled()) { + LOGGER.trace("Bulk fetching {} from {}.{}.", filter, + StringUtils.hasText(context.getDatabase()) ? 
context.getDatabase() + : collection.getNamespace().getDatabaseName(), + context.getCollection()); + } + + return filter.apply(collection); + } + + protected MongoCollection getCollection(ReferenceContext context) { + + return MongoDatabaseUtils.getDatabase(context.database, mongoDbFactory).getCollection(context.collection, + Document.class); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java new file mode 100644 index 0000000000..b4324b505f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -0,0 +1,69 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.util.function.BiFunction; +import java.util.stream.Stream; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +public class DefaultReferenceResolver implements ReferenceResolver { + + private final ReferenceLoader referenceLoader; + + public DefaultReferenceResolver(ReferenceLoader referenceLoader) { + this.referenceLoader = referenceLoader; + } + + @Override + public ReferenceLoader getReferenceLoader() { + return referenceLoader; + } + + @Nullable + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, + BiFunction> lookupFunction) { + + if (isLazyReference(property)) { + return createLazyLoadingProxy(property, source, referenceReader, lookupFunction); + } + + return referenceReader.readReference(property, source, lookupFunction); + } + + private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, + ReferenceReader referenceReader, BiFunction> lookupFunction) { + return new LazyLoadingProxyGenerator(referenceReader).createLazyLoadingProxy(property, source, lookupFunction); + } + + protected boolean isLazyReference(MongoPersistentProperty property) { + + if (property.findAnnotation(DocumentReference.class) != null) { + return property.findAnnotation(DocumentReference.class).lazy(); + } + + return property.getDBRef() != null && property.getDBRef().lazy(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java new file mode 100644 index 0000000000..35da1e1e23 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java @@ -0,0 +1,253 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import static org.springframework.util.ReflectionUtils.*; + +import java.io.Serializable; +import java.lang.reflect.Method; +import java.util.function.BiFunction; +import java.util.stream.Stream; + +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + +import org.aopalliance.intercept.MethodInterceptor; +import org.aopalliance.intercept.MethodInvocation; +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.aop.framework.ProxyFactory; +import org.springframework.cglib.proxy.Callback; +import org.springframework.cglib.proxy.Enhancer; +import org.springframework.cglib.proxy.Factory; +import org.springframework.cglib.proxy.MethodProxy; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.objenesis.ObjenesisStd; 
+import org.springframework.util.ReflectionUtils; + +/** + * @author Christoph Strobl + */ +class LazyLoadingProxyGenerator { + + private final ObjenesisStd objenesis; + private final ReferenceReader referenceReader; + + public LazyLoadingProxyGenerator(ReferenceReader referenceReader) { + + this.referenceReader = referenceReader; + this.objenesis = new ObjenesisStd(true); + } + + public Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, + BiFunction> lookupFunction) { + + Class propertyType = property.getType(); + LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, source, referenceReader, lookupFunction); + + if (!propertyType.isInterface()) { + + Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType)); + factory.setCallbacks(new Callback[] { interceptor }); + + return factory; + } + + ProxyFactory proxyFactory = new ProxyFactory(); + + for (Class type : propertyType.getInterfaces()) { + proxyFactory.addInterface(type); + } + + proxyFactory.addInterface(LazyLoadingProxy.class); + proxyFactory.addInterface(propertyType); + proxyFactory.addAdvice(interceptor); + + return proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader()); + } + + /** + * Returns the CGLib enhanced type for the given source type. 
+ * + * @param type + * @return + */ + private Class getEnhancedTypeFor(Class type) { + + Enhancer enhancer = new Enhancer(); + enhancer.setSuperclass(type); + enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class); + enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class }); + + return enhancer.createClass(); + } + + public static class LazyLoadingInterceptor + implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable { + + private final ReferenceReader referenceReader; + MongoPersistentProperty property; + private volatile boolean resolved; + private @org.springframework.lang.Nullable Object result; + private Object source; + private BiFunction> lookupFunction; + + private final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD; + + { + try { + INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget"); + TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef"); + FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize"); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public LazyLoadingInterceptor(MongoPersistentProperty property, Object source, ReferenceReader reader, + BiFunction> lookupFunction) { + + this.property = property; + this.source = source; + this.referenceReader = reader; + this.lookupFunction = lookupFunction; + } + + @Nullable + @Override + public Object invoke(@Nonnull MethodInvocation invocation) throws Throwable { + return intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); + } + + @Override + public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable { + + if (INITIALIZE_METHOD.equals(method)) { + return ensureResolved(); + } + + if (TO_DBREF_METHOD.equals(method)) { + return null; + } + + if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) { + + if (ReflectionUtils.isToStringMethod(method)) { + return proxyToString(proxy); + 
} + + if (ReflectionUtils.isEqualsMethod(method)) { + return proxyEquals(proxy, args[0]); + } + + if (ReflectionUtils.isHashCodeMethod(method)) { + return proxyHashCode(proxy); + } + + // DATAMONGO-1076 - finalize methods should not trigger proxy initialization + if (FINALIZE_METHOD.equals(method)) { + return null; + } + } + + Object target = ensureResolved(); + + if (target == null) { + return null; + } + + ReflectionUtils.makeAccessible(method); + + return method.invoke(target, args); + } + + private Object ensureResolved() { + + if (!resolved) { + this.result = resolve(); + this.resolved = true; + } + + return this.result; + } + + private String proxyToString(Object source) { + + StringBuilder description = new StringBuilder(); + if (source != null) { + description.append(source); + } else { + description.append(System.identityHashCode(source)); + } + description.append("$").append(LazyLoadingProxy.class.getSimpleName()); + + return description.toString(); + } + + private boolean proxyEquals(@org.springframework.lang.Nullable Object proxy, Object that) { + + if (!(that instanceof LazyLoadingProxy)) { + return false; + } + + if (that == proxy) { + return true; + } + + return proxyToString(proxy).equals(that.toString()); + } + + private int proxyHashCode(@org.springframework.lang.Nullable Object proxy) { + return proxyToString(proxy).hashCode(); + } + + @org.springframework.lang.Nullable + private synchronized Object resolve() { + + if (resolved) { + + // if (LOGGER.isTraceEnabled()) { + // LOGGER.trace("Accessing already resolved lazy loading property {}.{}", + // property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()); + // } + return result; + } + + try { + // if (LOGGER.isTraceEnabled()) { + // LOGGER.trace("Resolving lazy loading property {}.{}", + // property.getOwner() != null ? 
property.getOwner().getName() : "unknown", property.getName()); + // } + + return referenceReader.readReference(property, source, lookupFunction); + + } catch (RuntimeException ex) { + throw ex; + + // DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex); + // + // if (translatedException instanceof ClientSessionException) { + // throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex); + // } + + // throw new LazyLoadingException("Unable to lazily resolve DBRef!", + // translatedException != null ? translatedException : ex); + } + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 74d189b4c5..0d3378d39f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -28,6 +28,7 @@ import java.util.Map.Entry; import java.util.Optional; import java.util.Set; +import java.util.stream.Collectors; import org.bson.Document; import org.bson.codecs.Codec; @@ -62,8 +63,10 @@ import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider; import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.ObjectReference; import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.mapping.Unwrapped.OnEmpty; import 
org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; @@ -112,6 +115,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App protected final QueryMapper idMapper; protected final DbRefResolver dbRefResolver; protected final DefaultDbRefProxyHandler dbRefProxyHandler; + protected final ReferenceReader referenceReader; protected @Nullable ApplicationContext applicationContext; protected MongoTypeMapper typeMapper; @@ -136,12 +140,12 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, Assert.notNull(mappingContext, "MappingContext must not be null!"); this.dbRefResolver = dbRefResolver; + this.mappingContext = mappingContext; this.typeMapper = new DefaultMongoTypeMapper(DefaultMongoTypeMapper.DEFAULT_TYPE_KEY, mappingContext, this::getWriteTarget); this.idMapper = new QueryMapper(this); - this.spELContext = new SpELContext(DocumentPropertyAccessor.INSTANCE); this.dbRefProxyHandler = new DefaultDbRefProxyHandler(spELContext, mappingContext, (prop, bson, evaluator, path) -> { @@ -149,6 +153,9 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, ConversionContext context = getConversionContext(path); return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator); }); + + this.referenceReader = new ReferenceReader(mappingContext, + (prop, document) -> this.read(prop.getActualType(), document), () -> spELContext); } /** @@ -376,8 +383,7 @@ private S read(ConversionContext context, MongoPersistentEnti } private S populateProperties(ConversionContext context, MongoPersistentEntity entity, - DocumentAccessor documentAccessor, - SpELExpressionEvaluator evaluator, S instance) { + DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator, S instance) { PersistentPropertyAccessor accessor = new ConvertingPropertyAccessor<>(entity.getPropertyAccessor(instance), conversionService); @@ -423,8 +429,7 @@ private Object readAndPopulateIdentifier(ConversionContext context, PersistentPr 
@Nullable private Object readIdValue(ConversionContext context, SpELExpressionEvaluator evaluator, - MongoPersistentProperty idProperty, - Object rawId) { + MongoPersistentProperty idProperty, Object rawId) { String expression = idProperty.getSpelExpression(); Object resolvedValue = expression != null ? evaluator.evaluate(expression) : rawId; @@ -434,8 +439,7 @@ private Object readIdValue(ConversionContext context, SpELExpressionEvaluator ev private void readProperties(ConversionContext context, MongoPersistentEntity entity, PersistentPropertyAccessor accessor, DocumentAccessor documentAccessor, - MongoDbPropertyValueProvider valueProvider, - SpELExpressionEvaluator evaluator) { + MongoDbPropertyValueProvider valueProvider, SpELExpressionEvaluator evaluator) { DbRefResolverCallback callback = null; @@ -493,20 +497,38 @@ private void readAssociation(Association association, P DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback) { MongoPersistentProperty property = association.getInverse(); - Object value = documentAccessor.get(property); + final Object value = documentAccessor.get(property); if (value == null) { return; } + if (property.isAnnotationPresent(DocumentReference.class)) { + + // quite unusual but sounds like worth having? + + if (conversionService.canConvert(ObjectReference.class, property.getActualType())) { + + // collection like special treatment + accessor.setProperty(property, conversionService.convert(new ObjectReference() { + @Override + public Object getPointer() { + return value; + } + }, property.getActualType())); + } else { + accessor.setProperty(property, dbRefResolver.resolveReference(property, value, referenceReader)); + } + return; + } + DBRef dbref = value instanceof DBRef ? 
(DBRef) value : null; accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler)); } @Nullable private Object readUnwrapped(ConversionContext context, DocumentAccessor documentAccessor, - MongoPersistentProperty prop, - MongoPersistentEntity unwrappedEntity) { + MongoPersistentProperty prop, MongoPersistentEntity unwrappedEntity) { if (prop.findAnnotation(Unwrapped.class).onEmpty().equals(OnEmpty.USE_EMPTY)) { return read(context, unwrappedEntity, (Document) documentAccessor.getDocument()); @@ -725,6 +747,18 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce return; } + if (prop.isAssociation()) { + + if (conversionService.canConvert(valueType.getType(), ObjectReference.class)) { + accessor.put(prop, conversionService.convert(obj, ObjectReference.class).getPointer()); + } else { + // just take the id as a reference + accessor.put(prop, mappingContext.getPersistentEntity(prop.getAssociationTargetType()) + .getIdentifierAccessor(obj).getIdentifier()); + } + return; + } + /* * If we have a LazyLoadingProxy we make sure it is initialized first. 
*/ @@ -763,6 +797,18 @@ protected List createCollection(Collection collection, MongoPersisten if (!property.isDbReference()) { + if (property.isAssociation()) { + return writeCollectionInternal(collection.stream().map(it -> { + if (conversionService.canConvert(it.getClass(), ObjectReference.class)) { + return conversionService.convert(it, ObjectReference.class).getPointer(); + } else { + // just take the id as a reference + return mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(it) + .getIdentifier(); + } + }).collect(Collectors.toList()), ClassTypeInformation.from(ObjectReference.class), new BasicDBList()); + } + if (property.hasExplicitWriteTarget()) { return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>()); } @@ -795,7 +841,7 @@ protected Bson createMap(Map map, MongoPersistentProperty proper Assert.notNull(map, "Given map must not be null!"); Assert.notNull(property, "PersistentProperty must not be null!"); - if (!property.isDbReference()) { + if (!property.isAssociation()) { return writeMapInternal(map, new Document(), property.getTypeInformation()); } @@ -809,7 +855,17 @@ protected Bson createMap(Map map, MongoPersistentProperty proper if (conversions.isSimpleType(key.getClass())) { String simpleKey = prepareMapKey(key.toString()); - document.put(simpleKey, value != null ? createDBRef(value, property) : null); + if(property.isDbReference()) { + document.put(simpleKey, value != null ? 
createDBRef(value, property) : null); + } else { + if (conversionService.canConvert(value.getClass(), ObjectReference.class)) { + document.put(simpleKey, conversionService.convert(value, ObjectReference.class).getPointer()); + } else { + // just take the id as a reference + document.put(simpleKey, mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(value) + .getIdentifier()); + } + } } else { throw new MappingException("Cannot use a complex object as a key value."); @@ -1447,8 +1503,7 @@ private List bulkReadAndConvertDBRefs(ConversionContext context, List(document, (Class) type.getType(), collectionName)); + maybeEmitEvent(new AfterLoadEvent<>(document, (Class) type.getType(), collectionName)); target = (T) readDocument(context, document, type); } @@ -1541,9 +1596,10 @@ private T doConvert(Object value, Class target) } @SuppressWarnings("ConstantConditions") - private T doConvert(Object value, Class target, @Nullable Class fallback) { + private T doConvert(Object value, Class target, + @Nullable Class fallback) { - if(conversionService.canConvert(value.getClass(), target) || fallback == null) { + if (conversionService.canConvert(value.getClass(), target) || fallback == null) { return conversionService.convert(value, target); } return conversionService.convert(value, fallback); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java index 8cb28bfe14..cbd02ee74d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java @@ -16,8 +16,12 @@ package org.springframework.data.mongodb.core.convert; import java.util.List; +import java.util.function.BiFunction; +import java.util.stream.Stream; import 
org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; @@ -69,4 +73,16 @@ public List bulkFetch(List dbRefs) { private T handle() throws UnsupportedOperationException { throw new UnsupportedOperationException("DBRef resolution is not supported!"); } + + @Nullable + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, + BiFunction> lookupFunction) { + return null; + } + + @Override + public ReferenceLoader getReferenceLoader() { + return handle(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java new file mode 100644 index 0000000000..0bfd30d9b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -0,0 +1,79 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; +import org.springframework.lang.Nullable; + +import com.mongodb.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +public interface ReferenceLoader { + + @Nullable + default Document fetch(ReferenceFilter filter, ReferenceContext context) { + return bulkFetch(filter, context).findFirst().orElse(null); + } + + Stream bulkFetch(ReferenceFilter filter, ReferenceContext context); + + interface ReferenceFilter { + + Bson getFilter(); + + default Bson getSort() { + return new Document(); + } + + default Stream apply(MongoCollection collection) { + return restoreOrder(StreamSupport.stream(collection.find(getFilter()).sort(getSort()).spliterator(), false)); + } + + default Stream restoreOrder(Stream stream) { + return stream; + } + + static ReferenceFilter referenceFilter(Bson bson) { + return () -> bson; + } + + static ReferenceFilter singleReferenceFilter(Bson bson) { + + return new ReferenceFilter() { + + @Override + public Bson getFilter() { + return bson; + } + + @Override + public Stream apply(MongoCollection collection) { + + Document result = collection.find(getFilter()).sort(getSort()).limit(1).first(); + return result != null ? Stream.of(result) : Stream.empty(); + } + }; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java new file mode 100644 index 0000000000..84dfb9c38f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java @@ -0,0 +1,350 @@ +/* + * Copyright 2021 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.function.BiFunction; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.SpELContext; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.json.ParameterBindingContext; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.mongodb.util.json.ValueProvider; +import org.springframework.data.util.Lazy; +import org.springframework.data.util.Streamable; 
+import org.springframework.expression.EvaluationContext; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.DBRef; +import com.mongodb.client.MongoCollection; + +/** + * @author Christoph Strobl + */ +public class ReferenceReader { + + private final ParameterBindingDocumentCodec codec; + + private final Lazy, MongoPersistentProperty>> mappingContext; + private final BiFunction documentConversionFunction; + private final Supplier spelContextSupplier; + + public ReferenceReader(MappingContext, MongoPersistentProperty> mappingContext, + BiFunction documentConversionFunction, + Supplier spelContextSupplier) { + + this(() -> mappingContext, documentConversionFunction, spelContextSupplier); + } + + public ReferenceReader( + Supplier, MongoPersistentProperty>> mappingContextSupplier, + BiFunction documentConversionFunction, + Supplier spelContextSupplier) { + + this.mappingContext = Lazy.of(mappingContextSupplier); + this.documentConversionFunction = documentConversionFunction; + this.spelContextSupplier = spelContextSupplier; + this.codec = new ParameterBindingDocumentCodec(); + } + + Object readReference(MongoPersistentProperty property, Object value, + BiFunction> lookupFunction) { + + SpELContext spELContext = spelContextSupplier.get(); + + ReferenceFilter filter = computeFilter(property, value, spELContext); + ReferenceContext referenceContext = computeReferenceContext(property, value, spELContext); + + Stream result = lookupFunction.apply(referenceContext, filter); + + if (property.isCollectionLike()) { + return result.map(it -> documentConversionFunction.apply(property, it)).collect(Collectors.toList()); + } + + if (property.isMap()) { + + // the order is a real problem here + Iterator keyIterator = ((Map) value).keySet().iterator(); + return result.map(it -> it.entrySet().stream().collect(Collectors.toMap(key -> key.getKey(), val -> { + Object apply = documentConversionFunction.apply(property, (Document) 
val.getValue()); + return apply; + }))).findFirst().orElse(null); + } + + return result.map(it -> documentConversionFunction.apply(property, it)).findFirst().orElse(null); + } + + private ReferenceContext computeReferenceContext(MongoPersistentProperty property, Object value, + SpELContext spELContext) { + + if (value instanceof Iterable) { + value = ((Iterable) value).iterator().next(); + } + + if (value instanceof DBRef) { + return ReferenceContext.fromDBRef((DBRef) value); + } + + if (value instanceof Document) { + + Document ref = (Document) value; + + if (property.isAnnotationPresent(DocumentReference.class)) { + + ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); + DocumentReference documentReference = property.getRequiredAnnotation(DocumentReference.class); + + String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, + () -> ref.get("db", String.class)); + String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, + () -> ref.get("collection", + mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection())); + return new ReferenceContext(targetDatabase, targetCollection); + } + + return new ReferenceContext(ref.getString("db"), ref.get("collection", + mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection())); + } + + if (property.isAnnotationPresent(DocumentReference.class)) { + + ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); + DocumentReference documentReference = property.getRequiredAnnotation(DocumentReference.class); + + String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null); + String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, + () -> mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection()); + Document sort = 
parseValueOrGet(documentReference.sort(), bindingContext, () -> null); + + return new ReferenceContext(targetDatabase, targetCollection); + } + + return new ReferenceContext(null, + mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection()); + } + + @Nullable + private T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier defaultValue) { + + if (!StringUtils.hasText(value)) { + return defaultValue.get(); + } + + if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) { + String s = "{ 'target-value' : " + value + "}"; + T evaluated = (T) new ParameterBindingDocumentCodec().decode(s, bindingContext).get("target-value "); + return evaluated != null ? evaluated : defaultValue.get(); + } + + T evaluated = (T) bindingContext.evaluateExpression(value); + return evaluated != null ? evaluated : defaultValue.get(); + } + + ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + return new ParameterBindingContext(valueProviderFor(source), spELContext.getParser(), + () -> evaluationContextFor(property, source, spELContext)); + } + + ValueProvider valueProviderFor(Object source) { + return (index) -> { + + if (source instanceof Document) { + return Streamable.of(((Document) source).values()).toList().get(index); + } + return source; + }; + } + + EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) { + + EvaluationContext ctx = spELContext.getEvaluationContext(source); + ctx.setVariable("target", source); + ctx.setVariable(property.getName(), source); + + return ctx; + } + + ReferenceFilter computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) { + + DocumentReference documentReference = property.getRequiredAnnotation(DocumentReference.class); + String lookup = documentReference.lookup(); + + Document sort = parseValueOrGet(documentReference.sort(), 
bindingContext(property, value, spELContext), () -> null); + + if (property.isCollectionLike() && value instanceof Collection) { + + List ors = new ArrayList<>(); + for (Object entry : (Collection) value) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); + ors.add(decoded); + } + + return new ListReferenceFilter(new Document("$or", ors), sort); + } + + if (property.isMap() && value instanceof Map) { + + Map filterMap = new LinkedHashMap<>(); + + for (Entry entry : ((Map) value).entrySet()) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext)); + filterMap.put(entry.getKey(), decoded); + } + + return new MapReferenceFilter(new Document("$or", filterMap.values()), sort, filterMap); + } + + return new SingleReferenceFilter(codec.decode(lookup, bindingContext(property, value, spELContext)), sort); + } + + static class SingleReferenceFilter implements ReferenceFilter { + + Document filter; + Document sort; + + public SingleReferenceFilter(Document filter, Document sort) { + this.filter = filter; + this.sort = sort; + } + + @Override + public Bson getFilter() { + return filter; + } + + @Override + public Stream apply(MongoCollection collection) { + + Document result = collection.find(getFilter()).limit(1).first(); + return result != null ? 
Stream.of(result) : Stream.empty(); + } + } + + static class MapReferenceFilter implements ReferenceFilter { + + Document filter; + Document sort; + Map filterOrderMap; + + public MapReferenceFilter(Document filter, Document sort, Map filterOrderMap) { + + this.filter = filter; + this.filterOrderMap = filterOrderMap; + this.sort = sort; + } + + @Override + public Bson getFilter() { + return filter; + } + + @Override + public Bson getSort() { + return sort; + } + + @Override + public Stream restoreOrder(Stream stream) { + + Map targetMap = new LinkedHashMap<>(); + List collected = stream.collect(Collectors.toList()); + + for (Entry filterMapping : filterOrderMap.entrySet()) { + + String key = filterMapping.getKey().toString(); + Optional first = collected.stream().filter(it -> { + + boolean found = it.entrySet().containsAll(filterMapping.getValue().entrySet()); + return found; + }).findFirst(); + + targetMap.put(key, first.orElse(null)); + } + return Stream.of(new Document(targetMap)); + } + } + + static class ListReferenceFilter implements ReferenceFilter { + + Document filter; + Document sort; + + public ListReferenceFilter(Document filter, Document sort) { + this.filter = filter; + this.sort = sort; + } + + @Override + public Stream restoreOrder(Stream stream) { + + if (filter.containsKey("$or")) { + List ors = filter.get("$or", List.class); + return stream.sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)); + } + + return stream; + } + + public Document getFilter() { + return filter; + } + + @Override + public Document getSort() { + return sort; + } + + int compareAgainstReferenceIndex(List referenceList, Document document1, Document document2) { + + for (int i = 0; i < referenceList.size(); i++) { + + Set> entries = referenceList.get(i).entrySet(); + if (document1.entrySet().containsAll(entries)) { + return -1; + } + if (document2.entrySet().containsAll(entries)) { + return 1; + } + } + return referenceList.size(); + } + + } + +} diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java new file mode 100644 index 0000000000..ff08953633 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -0,0 +1,74 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import java.util.function.BiFunction; +import java.util.stream.Stream; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; + +import com.mongodb.DBRef; + +/** + * @author Christoph Strobl + */ +public interface ReferenceResolver { + + @Nullable + Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, + BiFunction> lookupFunction); + + default Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader) { + return resolveReference(property, source, referenceReader, (ctx, filter) -> { + if (property.isCollectionLike() || property.isMap()) { + return getReferenceLoader().bulkFetch(filter, ctx); + } + Object target = getReferenceLoader().fetch(filter, ctx); + return target == null ? 
Stream.empty() : Stream.of(getReferenceLoader().fetch(filter, ctx)); + }); + } + + ReferenceLoader getReferenceLoader(); + + class ReferenceContext { + + @Nullable final String database; + final String collection; + + public ReferenceContext(@Nullable String database, String collection) { + + this.database = database; + this.collection = collection; + } + + static ReferenceContext fromDBRef(DBRef dbRef) { + return new ReferenceContext(dbRef.getDatabaseName(), dbRef.getCollectionName()); + } + + public String getCollection() { + return collection; + } + + @Nullable + public String getDatabase() { + return database; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java new file mode 100644 index 0000000000..d9af6ccee1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java @@ -0,0 +1,50 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.data.annotation.Reference; + +/** + * @author Christoph Strobl + * @since 3.3 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.FIELD }) +@Reference +public @interface DocumentReference { + + /** + * The database the referred entity resides in. + * + * @return empty String by default. + */ + String db() default ""; + + String collection() default ""; + + String lookup() default "{ '_id' : ?#{#target} }"; + + String sort() default ""; + + boolean lazy() default false; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java new file mode 100644 index 0000000000..ed787f66b4 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java @@ -0,0 +1,24 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.mapping; + +/** + * @author Christoph Strobl + */ +@FunctionalInterface +public interface ObjectReference { + T getPointer(); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java new file mode 100644 index 0000000000..3cbc7fef7f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -0,0 +1,649 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.query.Criteria.*; +import static org.springframework.data.mongodb.core.query.Query.*; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.Getter; +import lombok.Setter; + +import java.util.Arrays; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.bson.Document; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Id; +import org.springframework.data.convert.WritingConverter; +import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.ObjectReference; +import org.springframework.data.mongodb.test.util.Client; +import org.springframework.data.mongodb.test.util.MongoClientExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.lang.Nullable; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.model.Filters; + +/** + * {@link DBRef} related integration tests for {@link MongoTemplate}. 
+ * + * @author Christoph Strobl + */ +@ExtendWith(MongoClientExtension.class) +public class MongoTemplateDocumentReferenceTests { + + public static final String DB_NAME = "manual-reference-tests"; + + static @Client MongoClient client; + + MongoTestTemplate template = new MongoTestTemplate(cfg -> { + + cfg.configureDatabaseFactory(it -> { + + it.client(client); + it.defaultDb(DB_NAME); + }); + + cfg.configureConversion(it -> { + it.customConverters(new ReferencableConverter()); + }); + + cfg.configureMappingContext(it -> { + it.autocreateIndex(false); + }); + }); + + @BeforeEach + public void setUp() { + template.flushDatabase(); + } + + @Test + void writeSimpleTypeReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.simpleValueRef = new SimpleObjectRef("ref-1", "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("simpleValueRef")).isEqualTo("ref-1"); + } + + @Test + void writeMapTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.mapValueRef = new LinkedHashMap<>(); + source.mapValueRef.put("frodo", new SimpleObjectRef("ref-1", "me-the-1-referenced-object")); + source.mapValueRef.put("bilbo", new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + System.out.println("target: " + target.toJson()); + assertThat(target.get("mapValueRef", Map.class)).containsEntry("frodo", "ref-1").containsEntry("bilbo", "ref-2"); + } + + @Test + void 
writeCollectionOfSimpleTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.simpleValueRef = Arrays.asList(new SimpleObjectRef("ref-1", "me-the-1-referenced-object"), + new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("simpleValueRef", List.class)).containsExactly("ref-1", "ref-2"); + } + + @Test + void writeObjectTypeReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.objectValueRef = new ObjectRefOfDocument("ref-1", "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("objectValueRef")).isEqualTo(source.getObjectValueRef().toReference()); + } + + @Test + void writeCollectionOfObjectTypeReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.objectValueRef = Arrays.asList(new ObjectRefOfDocument("ref-1", "me-the-1-referenced-object"), + new ObjectRefOfDocument("ref-2", "me-the-2-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("objectValueRef", List.class)).containsExactly( + source.getObjectValueRef().get(0).toReference(), source.getObjectValueRef().get(1).toReference()); + } + + @Test + void readSimpleTypeObjectReference() { + + String 
rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test + void readCollectionOfSimpleTypeObjectReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Collections.singletonList("ref-1")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test + void readLazySimpleTypeObjectReference() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleLazyValueRef", 
"ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + + LazyLoadingTestUtils.assertProxy(result.simpleLazyValueRef, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + assertThat(result.getSimpleLazyValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test + void readSimpleTypeObjectReferenceFromFieldWithCustomName() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simple-value-ref-annotated-field-name", + "ref-1"); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getSimpleValueRefWithAnnotatedFieldName()) + .isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test + void readCollectionTypeObjectReferenceFromFieldWithCustomName() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simple-value-ref-annotated-field-name", + Collections.singletonList("ref-1")); + + template.execute(db -> { + + 
db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRefWithAnnotatedFieldName()) + .containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); + } + + @Test + void readObjectReferenceFromDocumentType() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOfDocument.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRef", + new Document("id", "ref-1").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRef()).isEqualTo(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); + } + + @Test + void readCollectionObjectReferenceFromDocumentType() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOfDocument.class); + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRef", + Collections.singletonList(new Document("id", "ref-1").append("property", "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = 
template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRef()) + .containsExactly(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); + } + + @Test + void readObjectReferenceFromDocumentDeclaringCollectionName() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = "object-ref-of-document-with-embedded-collection-name"; + Document refSource = new Document("_id", "ref-1").append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append( + "objectValueRefWithEmbeddedCollectionName", + new Document("id", "ref-1").append("collection", "object-ref-of-document-with-embedded-collection-name") + .append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRefWithEmbeddedCollectionName()) + .isEqualTo(new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-referenced-object")); + } + + @Test + void readCollectionObjectReferenceFromDocumentDeclaringCollectionName() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = "object-ref-of-document-with-embedded-collection-name"; + Document refSource1 = new Document("_id", "ref-1").append("value", "me-the-1-referenced-object"); + Document refSource2 = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append( + "objectValueRefWithEmbeddedCollectionName", + Arrays.asList( + new Document("id", "ref-2").append("collection", "object-ref-of-document-with-embedded-collection-name"), + new Document("id", 
"ref-1").append("collection", "object-ref-of-document-with-embedded-collection-name") + .append("property", "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getObjectValueRefWithEmbeddedCollectionName()).containsExactly( + new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-2", "me-the-2-referenced-object"), + new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-1-referenced-object")); + } + + @Test + void readObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRefOnNonIdFields", + new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + assertThat(result.getObjectValueRefOnNonIdFields()) + .isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test + void readLazyObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + String refCollectionName = 
template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("lazyObjectValueRefOnNonIdFields", + new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", "without-any-meaning")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + SingleRefRoot result = template.findOne(query(where("id").is("id-1")), SingleRefRoot.class); + + LazyLoadingTestUtils.assertProxy(result.lazyObjectValueRefOnNonIdFields, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + assertThat(result.getLazyObjectValueRefOnNonIdFields()) + .isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test + void readCollectionObjectReferenceFromDocumentNotRelatingToTheIdProperty() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(ObjectRefOnNonIdField.class); + Document refSource = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("objectValueRefOnNonIdFields", + Collections.singletonList(new Document("refKey1", "ref-key-1").append("refKey2", "ref-key-2").append("property", + "without-any-meaning"))); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), 
CollectionRefRoot.class); + assertThat(result.getObjectValueRefOnNonIdFields()) + .containsExactly(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); + } + + @Test + void readMapOfReferences() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + + Document refSource1 = new Document("_id", "ref-1").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-1-referenced-object"); + + Document refSource2 = new Document("_id", "ref-2").append("refKey1", "ref-key-1").append("refKey2", "ref-key-2") + .append("value", "me-the-2-referenced-object"); + + Map refmap = new LinkedHashMap<>(); + refmap.put("frodo", "ref-1"); + refmap.put("bilbo", "ref-2"); + + Document source = new Document("_id", "id-1").append("value", "v1").append("mapValueRef", refmap); + + template.execute(db -> { + + db.getCollection(rootCollectionName).insertOne(source); + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + System.out.println("result: " + result); + + assertThat(result.getMapValueRef()).containsEntry("frodo", + new SimpleObjectRef("ref-1", "me-the-1-referenced-object")) + .containsEntry("bilbo", + new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + } + + @Data + static class SingleRefRoot { + + String id; + String value; + + @DocumentReference SimpleObjectRefWithReadingConverter withReadingConverter; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + SimpleObjectRef simpleValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", lazy = true) // + SimpleObjectRef simpleLazyValueRef; + + @Field("simple-value-ref-annotated-field-name") // + @DocumentReference(lookup = "{ '_id' 
: '?#{#target}' }") // + SimpleObjectRef simpleValueRefWithAnnotatedFieldName; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }") // + ObjectRefOfDocument objectValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "#collection") // + ObjectRefOfDocumentWithEmbeddedCollectionName objectValueRefWithEmbeddedCollectionName; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // + ObjectRefOnNonIdField objectValueRefOnNonIdFields; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }", lazy = true) // + ObjectRefOnNonIdField lazyObjectValueRefOnNonIdFields; + } + + @Data + static class CollectionRefRoot { + + String id; + String value; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + List simpleValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + Map mapValueRef; + + @Field("simple-value-ref-annotated-field-name") // + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // + List simpleValueRefWithAnnotatedFieldName; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }") // + List objectValueRef; + + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") // + List objectValueRefWithEmbeddedCollectionName; + + @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // + List objectValueRefOnNonIdFields; + } + + @FunctionalInterface + interface ReferenceAble { + Object toReference(); + } + + @Data + @AllArgsConstructor + @org.springframework.data.mongodb.core.mapping.Document("simple-object-ref") + static class SimpleObjectRef { + + @Id String id; + String value; + + } + + @Getter + @Setter + static class SimpleObjectRefWithReadingConverter extends SimpleObjectRef { + + public SimpleObjectRefWithReadingConverter(String id, String value, String id1, String value1) { + super(id, value); + } + } + + @Data + @AllArgsConstructor + static class ObjectRefOfDocument 
implements ReferenceAble { + + @Id String id; + String value; + + @Override + public Object toReference() { + return new Document("id", id).append("property", "without-any-meaning"); + } + } + + @Data + @AllArgsConstructor + static class ObjectRefOfDocumentWithEmbeddedCollectionName implements ReferenceAble { + + @Id String id; + String value; + + @Override + public Object toReference() { + return new Document("id", id).append("collection", "object-ref-of-document-with-embedded-collection-name"); + } + } + + @Data + @AllArgsConstructor + static class ObjectRefOnNonIdField implements ReferenceAble { + + @Id String id; + String value; + String refKey1; + String refKey2; + + @Override + public Object toReference() { + return new Document("refKey1", refKey1).append("refKey2", refKey2); + } + } + + static class ReferencableConverter implements Converter { + + @Nullable + @Override + public ObjectReference convert(ReferenceAble source) { + return source::toReference; + } + } + + @WritingConverter + class DocumentToSimpleObjectRefWithReadingConverter + implements Converter, SimpleObjectRefWithReadingConverter> { + + private final MongoTemplate template; + + public DocumentToSimpleObjectRefWithReadingConverter(MongoTemplate template) { + this.template = template; + } + + @Nullable + @Override + public SimpleObjectRefWithReadingConverter convert(ObjectReference source) { + return template.findOne(query(where("id").is(source.getPointer().get("the-ref-key-you-did-not-expect"))), + SimpleObjectRefWithReadingConverter.class); + } + } + + @WritingConverter + class SimpleObjectRefWithReadingConverterToDocumentConverter + implements Converter> { + + @Nullable + @Override + public ObjectReference convert(SimpleObjectRefWithReadingConverter source) { + return () -> new Document("the-ref-key-you-did-not-expect", source.getId()); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index 2c0f8649e2..84e7e2c2d8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -115,6 +115,8 @@ public void convertDocumentWithMapDBRef() { when(dbMock.getCollection(anyString(), eq(Document.class))).thenReturn(collectionMock); FindIterable fi = mock(FindIterable.class); + when(fi.limit(anyInt())).thenReturn(fi); + when(fi.sort(any())).thenReturn(fi); when(fi.first()).thenReturn(mapValDocument); when(collectionMock.find(Mockito.any(Bson.class))).thenReturn(fi); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java index d7a2870477..c0a6b8df90 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java @@ -33,7 +33,6 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; - import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.DocumentTestUtils; @@ -65,6 +64,8 @@ void setUp() { when(factoryMock.getMongoDatabase()).thenReturn(dbMock); when(dbMock.getCollection(anyString(), any(Class.class))).thenReturn(collectionMock); when(collectionMock.find(any(Document.class))).thenReturn(cursorMock); + when(cursorMock.sort(any(Document.class))).thenReturn(cursorMock); + 
when(cursorMock.spliterator()).thenReturn(Collections. emptyList().spliterator()); resolver = new DefaultDbRefResolver(factoryMock); } @@ -115,7 +116,7 @@ void bulkFetchShouldRestoreOriginalOrder() { DBRef ref1 = new DBRef("collection-1", o1.get("_id")); DBRef ref2 = new DBRef("collection-1", o2.get("_id")); - when(cursorMock.into(any())).then(invocation -> Arrays.asList(o2, o1)); + when(cursorMock.spliterator()).thenReturn(Arrays.asList(o2, o1).spliterator()); assertThat(resolver.bulkFetch(Arrays.asList(ref1, ref2))).containsExactly(o1, o2); } @@ -128,7 +129,7 @@ void bulkFetchContainsDuplicates() { DBRef ref1 = new DBRef("collection-1", document.get("_id")); DBRef ref2 = new DBRef("collection-1", document.get("_id")); - when(cursorMock.into(any())).then(invocation -> Arrays.asList(document)); + when(cursorMock.spliterator()).thenReturn(Arrays.asList(document).spliterator()); assertThat(resolver.bulkFetch(Arrays.asList(ref1, ref2))).containsExactly(document, document); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java index 5006459fc8..f5d43c8ef0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java @@ -17,9 +17,12 @@ import static org.assertj.core.api.Assertions.*; +import java.util.function.Consumer; + import org.springframework.aop.framework.Advised; import org.springframework.cglib.proxy.Factory; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.test.util.ReflectionTestUtils; /** @@ -49,8 +52,35 @@ public static void assertProxyIsResolved(Object target, boolean expected) { 
} } + public static void assertProxy(Object proxy, Consumer verification) { + + LazyLoadingProxyGenerator.LazyLoadingInterceptor interceptor = (LazyLoadingProxyGenerator.LazyLoadingInterceptor) (proxy instanceof Advised ? ((Advised) proxy).getAdvisors()[0].getAdvice() + : ((Factory) proxy).getCallback(0)); + + verification.accept(new LazyLoadingProxyValueRetriever(interceptor)); + } + private static LazyLoadingInterceptor extractInterceptor(Object proxy) { return (LazyLoadingInterceptor) (proxy instanceof Advised ? ((Advised) proxy).getAdvisors()[0].getAdvice() : ((Factory) proxy).getCallback(0)); } + + public static class LazyLoadingProxyValueRetriever { + + LazyLoadingProxyGenerator.LazyLoadingInterceptor interceptor; + + public LazyLoadingProxyValueRetriever(LazyLoadingProxyGenerator.LazyLoadingInterceptor interceptor) { + this.interceptor = interceptor; + } + + public boolean isResolved() { + return (boolean) ReflectionTestUtils.getField(interceptor, "resolved"); + } + + @Unwrapped.Nullable + public Object currentValue() { + return ReflectionTestUtils.getField(interceptor, "result"); + } + + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java index e310d7d298..9aa1bb0b57 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java @@ -18,13 +18,21 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; +import org.bson.conversions.Bson; +import org.springframework.data.mongodb.core.convert.ReferenceLoader; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import 
org.springframework.data.mongodb.core.convert.ReferenceReader; +import org.springframework.data.util.Streamable; +import org.springframework.lang.Nullable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.text.DecimalFormat; import java.util.*; +import java.util.function.BiFunction; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.stream.Stream; import org.bson.Document; import org.bson.types.ObjectId; @@ -96,6 +104,13 @@ public void setUp() throws Exception { context.afterPropertiesSet(); converter = new MappingMongoConverter(new DbRefResolver() { + + @Nullable + @Override + public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, BiFunction> lookupFunction) { + return null; + } + @Override public Object resolveDbRef(MongoPersistentProperty property, DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler proxyHandler) { @@ -117,6 +132,11 @@ public Document fetch(DBRef dbRef) { public List bulkFetch(List dbRefs) { return null; } + + @Override + public ReferenceLoader getReferenceLoader() { + return null; + } }, context); operations = new ReactiveMongoTemplate(mongoDbFactory, converter); diff --git a/spring-data-mongodb/src/test/resources/logback.xml b/spring-data-mongodb/src/test/resources/logback.xml index a36841c97c..f154590864 100644 --- a/spring-data-mongodb/src/test/resources/logback.xml +++ b/spring-data-mongodb/src/test/resources/logback.xml @@ -13,6 +13,7 @@ + From 48ac7e75ba585f05d7af992faf555f82449d250f Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 3 May 2021 14:09:12 +0200 Subject: [PATCH 016/983] First pass of review polishing. Original pull request: #3647. Closes #3602. 
--- .../data/mongodb/core/convert/ReferenceLoader.java | 4 ++++ .../data/mongodb/core/convert/ReferenceReader.java | 5 ++++- .../data/mongodb/core/convert/ReferenceResolver.java | 1 + .../data/mongodb/core/mapping/ObjectReference.java | 1 + 4 files changed, 10 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java index 0bfd30d9b8..184918529e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -35,8 +35,10 @@ default Document fetch(ReferenceFilter filter, ReferenceContext context) { return bulkFetch(filter, context).findFirst().orElse(null); } + // meh, Stream! Stream bulkFetch(ReferenceFilter filter, ReferenceContext context); + // Reference query interface ReferenceFilter { Bson getFilter(); @@ -45,6 +47,8 @@ default Bson getSort() { return new Document(); } + // TODO: Move apply method into something else that holds the collection and knows about single item/multi-item + // processing default Stream apply(MongoCollection collection) { return restoreOrder(StreamSupport.stream(collection.find(getFilter()).sort(getSort()).spliterator(), false)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java index 84dfb9c38f..e5a16ea431 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java @@ -80,6 +80,7 @@ public ReferenceReader( this.codec = new ParameterBindingDocumentCodec(); } + // TODO: 
Move documentConversionFunction to here. Having a contextual read allows projections in references Object readReference(MongoPersistentProperty property, Object value, BiFunction> lookupFunction) { @@ -94,6 +95,8 @@ Object readReference(MongoPersistentProperty property, Object value, return result.map(it -> documentConversionFunction.apply(property, it)).collect(Collectors.toList()); } + // TODO: retain target type and extract types here so the conversion function doesn't require type fiddling + // BiFunction instead of MongoPersistentProperty if (property.isMap()) { // the order is a real problem here @@ -165,7 +168,7 @@ private T parseValueOrGet(String value, ParameterBindingContext bindingConte if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) { String s = "{ 'target-value' : " + value + "}"; - T evaluated = (T) new ParameterBindingDocumentCodec().decode(s, bindingContext).get("target-value "); + T evaluated = (T) codec.decode(s, bindingContext).get("target-value "); return evaluated != null ? 
evaluated : defaultValue.get(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java index ff08953633..50bc6558d3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -47,6 +47,7 @@ default Object resolveReference(MongoPersistentProperty property, Object source, ReferenceLoader getReferenceLoader(); + // TODO: ReferenceCollection class ReferenceContext { @Nullable final String database; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java index ed787f66b4..9904b20d3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java @@ -19,6 +19,7 @@ * @author Christoph Strobl */ @FunctionalInterface +// TODO: ObjectPointer or DocumentPointer public interface ObjectReference { T getPointer(); } From 6ed274bd9bbff846546ebac412cea6be73a2e911 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 5 May 2021 09:44:53 +0200 Subject: [PATCH 017/983] Update entity linking support to derive document pointer from lookup query. Simplify usage by computing the pointer from the lookup. Update the reference documentation, add JavaDoc and refine API. Original pull request: #3647. Closes #3602. 
--- .../core/convert/DefaultDbRefResolver.java | 15 +- .../core/convert/DefaultReferenceLoader.java | 14 +- .../convert/DefaultReferenceResolver.java | 22 +- .../core/convert/DocumentPointerFactory.java | 135 +++++ .../core/convert/LazyLoadingProxy.java | 11 + .../convert/LazyLoadingProxyGenerator.java | 30 +- .../core/convert/MappingMongoConverter.java | 108 +++- .../mongodb/core/convert/MongoWriter.java | 4 + .../core/convert/NoOpDbRefResolver.java | 7 +- .../mongodb/core/convert/QueryMapper.java | 12 +- .../mongodb/core/convert/ReferenceLoader.java | 38 +- .../mongodb/core/convert/ReferenceReader.java | 136 ++--- .../core/convert/ReferenceResolver.java | 45 +- .../mapping/BasicMongoPersistentProperty.java | 19 + ...ectReference.java => DocumentPointer.java} | 13 +- .../core/mapping/DocumentReference.java | 86 ++- .../core/mapping/MongoPersistentProperty.java | 19 + .../UnwrappedMongoPersistentProperty.java | 11 + .../MongoTemplateDocumentReferenceTests.java | 548 ++++++++++++++++-- .../DefaultDbRefResolverUnitTests.java | 7 +- .../core/convert/QueryMapperUnitTests.java | 29 + .../performance/ReactivePerformanceTests.java | 7 +- ...tractPersonRepositoryIntegrationTests.java | 15 + .../data/mongodb/repository/Person.java | 12 + .../mongodb/repository/PersonRepository.java | 2 + src/main/asciidoc/new-features.adoc | 5 + src/main/asciidoc/reference/mapping.adoc | 365 ++++++++++++ 27 files changed, 1468 insertions(+), 247 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java rename spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/{ObjectReference.java => DocumentPointer.java} (57%) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java index 96b6c6876b..5277fbc0b0 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java @@ -46,7 +46,7 @@ import org.springframework.data.mongodb.LazyLoadingException; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.MongoDatabaseUtils; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; import org.springframework.objenesis.ObjenesisStd; @@ -117,7 +117,8 @@ public Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbr */ @Override public Document fetch(DBRef dbRef) { - return getReferenceLoader().fetch(ReferenceFilter.singleReferenceFilter(Filters.eq("_id", dbRef.getId())), ReferenceContext.fromDBRef(dbRef)); + return getReferenceLoader().fetch(DocumentReferenceQuery.singleReferenceFilter(Filters.eq("_id", dbRef.getId())), + ReferenceCollection.fromDBRef(dbRef)); } /* @@ -157,9 +158,9 @@ public List bulkFetch(List refs) { databaseSource.getCollectionName()); } - List result = getReferenceLoader() - .bulkFetch(ReferenceFilter.referenceFilter(new Document("_id", new Document("$in", ids))), ReferenceContext.fromDBRef(refs.iterator().next())) - .collect(Collectors.toList()); + List result = mongoCollection // + .find(new Document("_id", new Document("$in", ids))) // + .into(new ArrayList<>()); return ids.stream() // .flatMap(id -> documentWithId(id, result)) // @@ -498,9 +499,9 @@ protected MongoCollection getCollection(DBRef dbref) { .getCollection(dbref.getCollectionName(), Document.class); } - protected MongoCollection getCollection(ReferenceContext context) { + protected MongoCollection getCollection(ReferenceCollection 
context) { - return MongoDatabaseUtils.getDatabase(context.database, mongoDbFactory).getCollection(context.collection, + return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(), Document.class); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java index 27feca163d..66b698077b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java @@ -15,21 +15,15 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; - import org.bson.Document; -import org.bson.conversions.Bson; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.MongoDatabaseUtils; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; import org.springframework.util.Assert; import org.springframework.util.StringUtils; -import com.mongodb.client.FindIterable; import com.mongodb.client.MongoCollection; /** @@ -49,7 +43,7 @@ public DefaultReferenceLoader(MongoDatabaseFactory mongoDbFactory) { } @Override - public Stream bulkFetch(ReferenceFilter filter, ReferenceContext context) { + public Iterable bulkFetch(DocumentReferenceQuery filter, ReferenceCollection context) { MongoCollection collection = getCollection(context); @@ -63,9 +57,9 @@ public Stream bulkFetch(ReferenceFilter filter, ReferenceContext conte return filter.apply(collection); } - 
protected MongoCollection getCollection(ReferenceContext context) { + protected MongoCollection getCollection(ReferenceCollection context) { - return MongoDatabaseUtils.getDatabase(context.database, mongoDbFactory).getCollection(context.collection, + return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(), Document.class); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java index b4324b505f..0692f719b5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -15,12 +15,6 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.function.BiFunction; -import java.util.stream.Stream; - -import org.bson.Document; -import org.bson.conversions.Bson; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; @@ -44,24 +38,26 @@ public ReferenceLoader getReferenceLoader() { @Nullable @Override public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, - BiFunction> lookupFunction) { + LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction) { if (isLazyReference(property)) { - return createLazyLoadingProxy(property, source, referenceReader, lookupFunction); + return createLazyLoadingProxy(property, source, referenceReader, lookupFunction, resultConversionFunction); } - return referenceReader.readReference(property, source, 
lookupFunction); + return referenceReader.readReference(property, source, lookupFunction, resultConversionFunction); } private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, - ReferenceReader referenceReader, BiFunction> lookupFunction) { - return new LazyLoadingProxyGenerator(referenceReader).createLazyLoadingProxy(property, source, lookupFunction); + ReferenceReader referenceReader, LookupFunction lookupFunction, + ResultConversionFunction resultConversionFunction) { + return new LazyLoadingProxyGenerator(referenceReader).createLazyLoadingProxy(property, source, lookupFunction, + resultConversionFunction); } protected boolean isLazyReference(MongoPersistentProperty property) { - if (property.findAnnotation(DocumentReference.class) != null) { - return property.findAnnotation(DocumentReference.class).lazy(); + if (property.isDocumentReference()) { + return property.getDocumentReference().lazy(); } return property.getDBRef() != null && property.getDBRef().lazy(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java new file mode 100644 index 0000000000..a91a48d922 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java @@ -0,0 +1,135 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Map.Entry; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.bson.Document; +import org.springframework.core.convert.ConversionService; +import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; + +/** + * @author Christoph Strobl + * @since 3.3 + */ +class DocumentPointerFactory { + + private ConversionService conversionService; + private MappingContext, MongoPersistentProperty> mappingContext; + private Map linkageMap; + + public DocumentPointerFactory(ConversionService conversionService, + MappingContext, MongoPersistentProperty> mappingContext) { + + this.conversionService = conversionService; + this.mappingContext = mappingContext; + this.linkageMap = new HashMap<>(); + } + + public DocumentPointer computePointer(MongoPersistentProperty property, Object value, Class typeHint) { + + if (value instanceof LazyLoadingProxy) { + return () -> ((LazyLoadingProxy) value).getSource(); + } + + if (conversionService.canConvert(typeHint, DocumentPointer.class)) { + return conversionService.convert(value, DocumentPointer.class); + } else { + + MongoPersistentEntity persistentEntity = mappingContext + .getPersistentEntity(property.getAssociationTargetType()); + + if (!property.getDocumentReference().lookup().toLowerCase().replaceAll("\\s", "").replaceAll("'", "") + .equals("{_id:?#{#target}}")) { + + 
return () -> linkageMap.computeIfAbsent(property.getDocumentReference().lookup(), key -> { + return new LinkageDocument(key); + }).get(persistentEntity, + BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value)); + } + + // just take the id as a reference + return () -> persistentEntity.getIdentifierAccessor(value).getIdentifier(); + } + } + + static class LinkageDocument { + + String lookup; + org.bson.Document fetchDocument; + Map mapMap; + + public LinkageDocument(String lookup) { + + this.lookup = lookup; + String targetLookup = lookup; + + Pattern pattern = Pattern.compile("\\?#\\{#?[\\w\\d]*\\}"); + + Matcher matcher = pattern.matcher(lookup); + int index = 0; + mapMap = new LinkedHashMap<>(); + while (matcher.find()) { + + String expr = matcher.group(); + mapMap.put(Integer.valueOf(index), expr.substring(0, expr.length() - 1).replace("?#{#", "").replace("?#{", "") + .replace("target.", "").replaceAll("'", "")); + targetLookup = targetLookup.replace(expr, index + ""); + index++; + } + + fetchDocument = org.bson.Document.parse(targetLookup); + } + + org.bson.Document get(MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { + + org.bson.Document targetDocument = new Document(); + + // TODO: recursive matching over nested Documents or would the parameter binding json parser be a thing? + // like we have it ordered by index values and could provide the parameter array from it. 
+ + for (Entry entry : fetchDocument.entrySet()) { + + if (entry.getKey().equals("target")) { + + String refKey = mapMap.get(entry.getValue()); + + if (persistentEntity.hasIdProperty()) { + targetDocument.put(refKey, propertyAccessor.getProperty(persistentEntity.getIdProperty())); + } else { + targetDocument.put(refKey, propertyAccessor.getBean()); + } + continue; + } + + Object target = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(entry.getKey())); + String refKey = mapMap.get(entry.getValue()); + targetDocument.put(refKey, target); + } + return targetDocument; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java index a04a100cc5..8be7111988 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java @@ -46,4 +46,15 @@ public interface LazyLoadingProxy { */ @Nullable DBRef toDBRef(); + + /** + * Returns the raw {@literal source} object that defines the reference. + * + * @return can be {@literal null}. 
+ * @since 3.3 + */ + @Nullable + default Object getSource() { + return toDBRef(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java index 35da1e1e23..570a516d9b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java @@ -19,23 +19,19 @@ import java.io.Serializable; import java.lang.reflect.Method; -import java.util.function.BiFunction; -import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; -import org.bson.Document; -import org.bson.conversions.Bson; import org.springframework.aop.framework.ProxyFactory; import org.springframework.cglib.proxy.Callback; import org.springframework.cglib.proxy.Enhancer; import org.springframework.cglib.proxy.Factory; import org.springframework.cglib.proxy.MethodProxy; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.LookupFunction; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ResultConversionFunction; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.objenesis.ObjenesisStd; import org.springframework.util.ReflectionUtils; @@ -54,11 +50,12 @@ public LazyLoadingProxyGenerator(ReferenceReader referenceReader) { this.objenesis = new ObjenesisStd(true); } - public Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, - 
BiFunction> lookupFunction) { + public Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, LookupFunction lookupFunction, + ResultConversionFunction resultConversionFunction) { Class propertyType = property.getType(); - LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, source, referenceReader, lookupFunction); + LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, source, referenceReader, lookupFunction, + resultConversionFunction); if (!propertyType.isInterface()) { @@ -105,27 +102,30 @@ public static class LazyLoadingInterceptor private volatile boolean resolved; private @org.springframework.lang.Nullable Object result; private Object source; - private BiFunction> lookupFunction; + private LookupFunction lookupFunction; + private ResultConversionFunction resultConversionFunction; - private final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD; + private final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD; { try { INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget"); TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef"); FINALIZE_METHOD = Object.class.getDeclaredMethod("finalize"); + GET_SOURCE_METHOD = LazyLoadingProxy.class.getMethod("getSource"); } catch (Exception e) { throw new RuntimeException(e); } } public LazyLoadingInterceptor(MongoPersistentProperty property, Object source, ReferenceReader reader, - BiFunction> lookupFunction) { + LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction) { this.property = property; this.source = source; this.referenceReader = reader; this.lookupFunction = lookupFunction; + this.resultConversionFunction = resultConversionFunction; } @Nullable @@ -145,6 +145,10 @@ public Object intercept(Object o, Method method, Object[] args, MethodProxy prox return null; } + if (GET_SOURCE_METHOD.equals(method)) { + return source; + } + if (isObjectMethod(method) && 
Object.class.equals(method.getDeclaringClass())) { if (ReflectionUtils.isToStringMethod(method)) { @@ -234,7 +238,7 @@ private synchronized Object resolve() { // property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()); // } - return referenceReader.readReference(property, source, lookupFunction); + return referenceReader.readReference(property, source, lookupFunction, resultConversionFunction); } catch (RuntimeException ex) { throw ex; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 0d3378d39f..2ad4d75230 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -63,10 +63,9 @@ import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider; import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.MongoDatabaseFactory; -import org.springframework.data.mongodb.core.mapping.DocumentReference; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.ObjectReference; import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.mapping.Unwrapped.OnEmpty; import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; @@ -124,6 +123,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App private SpELContext spELContext; private @Nullable EntityCallbacks entityCallbacks; + private DocumentPointerFactory 
documentPointerFactory; /** * Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}. @@ -154,8 +154,8 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator); }); - this.referenceReader = new ReferenceReader(mappingContext, - (prop, document) -> this.read(prop.getActualType(), document), () -> spELContext); + this.referenceReader = new ReferenceReader(mappingContext, () -> spELContext); + this.documentPointerFactory = new DocumentPointerFactory(conversionService, mappingContext); } /** @@ -366,6 +366,14 @@ private S read(ConversionContext context, MongoPersistentEnti SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext); DocumentAccessor documentAccessor = new DocumentAccessor(bson); + if (bson.get("_id") != null) { + + Object existing = context.getPath().getPathItem(bson.get("_id"), entity.getCollection(), entity.getType()); + if (existing != null) { + return (S) existing; + } + } + PreferredConstructor persistenceConstructor = entity.getPersistenceConstructor(); ParameterValueProvider provider = persistenceConstructor != null @@ -376,6 +384,7 @@ private S read(ConversionContext context, MongoPersistentEnti S instance = instantiator.createInstance(entity, provider); if (entity.requiresPropertyPopulation()) { + return populateProperties(context, entity, documentAccessor, evaluator, instance); } @@ -451,7 +460,8 @@ private void readProperties(ConversionContext context, MongoPersistentEntity callback = getDbRefResolverCallback(context, documentAccessor, evaluator); } - readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); + readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback, context, + evaluator); continue; } @@ -478,7 +488,8 @@ private void readProperties(ConversionContext context, 
MongoPersistentEntity callback = getDbRefResolverCallback(context, documentAccessor, evaluator); } - readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback); + readAssociation(prop.getRequiredAssociation(), accessor, documentAccessor, dbRefProxyHandler, callback, context, + evaluator); continue; } @@ -494,7 +505,8 @@ private DbRefResolverCallback getDbRefResolverCallback(ConversionContext context } private void readAssociation(Association association, PersistentPropertyAccessor accessor, - DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback) { + DocumentAccessor documentAccessor, DbRefProxyHandler handler, DbRefResolverCallback callback, + ConversionContext context, SpELExpressionEvaluator evaluator) { MongoPersistentProperty property = association.getInverse(); final Object value = documentAccessor.get(property); @@ -503,26 +515,32 @@ private void readAssociation(Association association, P return; } - if (property.isAnnotationPresent(DocumentReference.class)) { + if (property.isDocumentReference()) { // quite unusual but sounds like worth having? 
- if (conversionService.canConvert(ObjectReference.class, property.getActualType())) { + if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { - // collection like special treatment - accessor.setProperty(property, conversionService.convert(new ObjectReference() { + DocumentPointer pointer = new DocumentPointer() { @Override public Object getPointer() { return value; } - }, property.getActualType())); + }; + + // collection like special treatment + accessor.setProperty(property, conversionService.convert(pointer, property.getActualType())); } else { - accessor.setProperty(property, dbRefResolver.resolveReference(property, value, referenceReader)); + accessor.setProperty(property, + dbRefResolver.resolveReference(property, value, referenceReader, context::convert)); } return; } DBRef dbref = value instanceof DBRef ? (DBRef) value : null; + + // TODO: accessor.setProperty(property, dbRefResolver.resolveReference(property, value, referenceReader, + // context::convert)); accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler)); } @@ -563,6 +581,45 @@ public DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referringP return createDBRef(object, referringProperty); } + public Object toDocumentReference(Object source, @Nullable MongoPersistentProperty referringProperty) { + + if (source instanceof LazyLoadingProxy) { + return ((LazyLoadingProxy) source).getSource(); + } + + if (referringProperty != null) { + + if (referringProperty.isDbReference()) { + return toDBRef(source, referringProperty); + } + if (referringProperty.isDocumentReference()) { + return createDocumentPointer(source, referringProperty); + } + } + + throw new RuntimeException("oops - what's that " + source); + } + + Object createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { + + if (referringProperty == null) { + return source; + } + + if 
(ClassUtils.isAssignableValue(referringProperty.getType(), source) + && conversionService.canConvert(referringProperty.getType(), DocumentPointer.class)) { + return conversionService.convert(source, DocumentPointer.class).getPointer(); + } + + if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) { + return documentPointerFactory.computePointer(referringProperty, source, referringProperty.getActualType()) + .getPointer(); + + } + + return source; + } + /** * Root entry method into write conversion. Adds a type discriminator to the {@link Document}. Shouldn't be called for * nested conversions. @@ -749,13 +806,8 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce if (prop.isAssociation()) { - if (conversionService.canConvert(valueType.getType(), ObjectReference.class)) { - accessor.put(prop, conversionService.convert(obj, ObjectReference.class).getPointer()); - } else { - // just take the id as a reference - accessor.put(prop, mappingContext.getPersistentEntity(prop.getAssociationTargetType()) - .getIdentifierAccessor(obj).getIdentifier()); - } + accessor.put(prop, new DocumentPointerFactory(conversionService, mappingContext) + .computePointer(prop, obj, valueType.getType()).getPointer()); return; } @@ -799,14 +851,14 @@ protected List createCollection(Collection collection, MongoPersisten if (property.isAssociation()) { return writeCollectionInternal(collection.stream().map(it -> { - if (conversionService.canConvert(it.getClass(), ObjectReference.class)) { - return conversionService.convert(it, ObjectReference.class).getPointer(); + if (conversionService.canConvert(it.getClass(), DocumentPointer.class)) { + return conversionService.convert(it, DocumentPointer.class).getPointer(); } else { // just take the id as a reference return mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(it) .getIdentifier(); } - }).collect(Collectors.toList()), 
ClassTypeInformation.from(ObjectReference.class), new BasicDBList()); + }).collect(Collectors.toList()), ClassTypeInformation.from(DocumentPointer.class), new BasicDBList()); } if (property.hasExplicitWriteTarget()) { @@ -855,15 +907,15 @@ protected Bson createMap(Map map, MongoPersistentProperty proper if (conversions.isSimpleType(key.getClass())) { String simpleKey = prepareMapKey(key.toString()); - if(property.isDbReference()) { + if (property.isDbReference()) { document.put(simpleKey, value != null ? createDBRef(value, property) : null); } else { - if (conversionService.canConvert(value.getClass(), ObjectReference.class)) { - document.put(simpleKey, conversionService.convert(value, ObjectReference.class).getPointer()); + if (conversionService.canConvert(value.getClass(), DocumentPointer.class)) { + document.put(simpleKey, conversionService.convert(value, DocumentPointer.class).getPointer()); } else { // just take the id as a reference - document.put(simpleKey, mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(value) - .getIdentifier()); + document.put(simpleKey, mappingContext.getPersistentEntity(property.getAssociationTargetType()) + .getIdentifierAccessor(value).getIdentifier()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java index 0f64177bca..779b3236d3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java @@ -70,4 +70,8 @@ default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity * @return will never be {@literal null}. 
*/ DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referingProperty); + + default Object toDocumentReference(Object source, @Nullable MongoPersistentProperty referringProperty) { + return toDBRef(source, referringProperty); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java index cbd02ee74d..8b6c969439 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java @@ -20,9 +20,9 @@ import java.util.stream.Stream; import org.bson.Document; -import org.bson.conversions.Bson; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import com.mongodb.DBRef; @@ -77,7 +77,8 @@ private T handle() throws UnsupportedOperationException { @Nullable @Override public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, - BiFunction> lookupFunction) { + LookupFunction lookupFunction, + ResultConversionFunction resultConversionFunction) { return null; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index af93fdd634..36353e4f86 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -605,7 +605,7 @@ protected Object convertAssociation(@Nullable Object source, @Nullable MongoPers if (source instanceof Iterable) { BasicDBList result = new BasicDBList(); for (Object element : (Iterable) source) { - result.add(createDbRefFor(element, property)); + result.add(createReferenceFor(element, property)); } return result; } @@ -614,12 +614,12 @@ protected Object convertAssociation(@Nullable Object source, @Nullable MongoPers Document result = new Document(); Document dbObject = (Document) source; for (String key : dbObject.keySet()) { - result.put(key, createDbRefFor(dbObject.get(key), property)); + result.put(key, createReferenceFor(dbObject.get(key), property)); } return result; } - return createDbRefFor(source, property); + return createReferenceFor(source, property); } /** @@ -666,12 +666,16 @@ private Entry createMapEntry(String key, @Nullable Object value) return Collections.singletonMap(key, value).entrySet().iterator().next(); } - private DBRef createDbRefFor(Object source, MongoPersistentProperty property) { + private Object createReferenceFor(Object source, MongoPersistentProperty property) { if (source instanceof DBRef) { return (DBRef) source; } + if(property != null && property.isDocumentReference()) { + return converter.toDocumentReference(source, property); + } + return converter.toDBRef(source, property); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java index 184918529e..d5c72afad8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -15,12 +15,12 @@ */ package 
org.springframework.data.mongodb.core.convert; -import java.util.stream.Stream; -import java.util.stream.StreamSupport; +import java.util.Collections; +import java.util.Iterator; import org.bson.Document; import org.bson.conversions.Bson; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; import org.springframework.lang.Nullable; import com.mongodb.client.MongoCollection; @@ -31,15 +31,15 @@ public interface ReferenceLoader { @Nullable - default Document fetch(ReferenceFilter filter, ReferenceContext context) { - return bulkFetch(filter, context).findFirst().orElse(null); + default Document fetch(DocumentReferenceQuery filter, ReferenceCollection context) { + + Iterator it = bulkFetch(filter, context).iterator(); + return it.hasNext() ? it.next() : null; } - // meh, Stream! - Stream bulkFetch(ReferenceFilter filter, ReferenceContext context); + Iterable bulkFetch(DocumentReferenceQuery filter, ReferenceCollection context); - // Reference query - interface ReferenceFilter { + interface DocumentReferenceQuery { Bson getFilter(); @@ -49,21 +49,21 @@ default Bson getSort() { // TODO: Move apply method into something else that holds the collection and knows about single item/multi-item // processing - default Stream apply(MongoCollection collection) { - return restoreOrder(StreamSupport.stream(collection.find(getFilter()).sort(getSort()).spliterator(), false)); + default Iterable apply(MongoCollection collection) { + return restoreOrder(collection.find(getFilter()).sort(getSort())); } - - default Stream restoreOrder(Stream stream) { - return stream; + + default Iterable restoreOrder(Iterable documents) { + return documents; } - static ReferenceFilter referenceFilter(Bson bson) { + static DocumentReferenceQuery referenceFilter(Bson bson) { return () -> bson; } - static ReferenceFilter singleReferenceFilter(Bson bson) { + static 
DocumentReferenceQuery singleReferenceFilter(Bson bson) { - return new ReferenceFilter() { + return new DocumentReferenceQuery() { @Override public Bson getFilter() { @@ -71,10 +71,10 @@ public Bson getFilter() { } @Override - public Stream apply(MongoCollection collection) { + public Iterable apply(MongoCollection collection) { Document result = collection.find(getFilter()).sort(getSort()).limit(1).first(); - return result != null ? Stream.of(result) : Stream.empty(); + return result != null ? Collections.singleton(result) : Collections.emptyList(); } }; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java index e5a16ea431..fb37367b1d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java @@ -17,24 +17,24 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.Iterator; +import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; -import java.util.function.BiFunction; import java.util.function.Supplier; import java.util.stream.Collectors; -import java.util.stream.Stream; import org.bson.Document; import org.bson.conversions.Bson; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.SpELContext; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceContext; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; +import 
org.springframework.data.mongodb.core.convert.ReferenceResolver.LookupFunction; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.ResultConversionFunction; import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; @@ -56,61 +56,47 @@ */ public class ReferenceReader { - private final ParameterBindingDocumentCodec codec; - private final Lazy, MongoPersistentProperty>> mappingContext; - private final BiFunction documentConversionFunction; private final Supplier spelContextSupplier; + private final ParameterBindingDocumentCodec codec; public ReferenceReader(MappingContext, MongoPersistentProperty> mappingContext, - BiFunction documentConversionFunction, Supplier spelContextSupplier) { - this(() -> mappingContext, documentConversionFunction, spelContextSupplier); + this(() -> mappingContext, spelContextSupplier); } public ReferenceReader( Supplier, MongoPersistentProperty>> mappingContextSupplier, - BiFunction documentConversionFunction, Supplier spelContextSupplier) { this.mappingContext = Lazy.of(mappingContextSupplier); - this.documentConversionFunction = documentConversionFunction; this.spelContextSupplier = spelContextSupplier; this.codec = new ParameterBindingDocumentCodec(); } - // TODO: Move documentConversionFunction to here. 
Having a contextual read allows projections in references - Object readReference(MongoPersistentProperty property, Object value, - BiFunction> lookupFunction) { + Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction, + ResultConversionFunction resultConversionFunction) { SpELContext spELContext = spelContextSupplier.get(); - ReferenceFilter filter = computeFilter(property, value, spELContext); - ReferenceContext referenceContext = computeReferenceContext(property, value, spELContext); + DocumentReferenceQuery filter = computeFilter(property, value, spELContext); + ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext); - Stream result = lookupFunction.apply(referenceContext, filter); + Iterable result = lookupFunction.apply(filter, referenceCollection); - if (property.isCollectionLike()) { - return result.map(it -> documentConversionFunction.apply(property, it)).collect(Collectors.toList()); + if (!result.iterator().hasNext()) { + return null; } - // TODO: retain target type and extract types here so the conversion function doesn't require type fiddling - // BiFunction instead of MongoPersistentProperty - if (property.isMap()) { - - // the order is a real problem here - Iterator keyIterator = ((Map) value).keySet().iterator(); - return result.map(it -> it.entrySet().stream().collect(Collectors.toMap(key -> key.getKey(), val -> { - Object apply = documentConversionFunction.apply(property, (Document) val.getValue()); - return apply; - }))).findFirst().orElse(null); + if (property.isCollectionLike()) { + return resultConversionFunction.apply(result, property.getTypeInformation()); } - return result.map(it -> documentConversionFunction.apply(property, it)).findFirst().orElse(null); + return resultConversionFunction.apply(result.iterator().next(), property.getTypeInformation()); } - private ReferenceContext computeReferenceContext(MongoPersistentProperty property, Object value, + 
private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value, SpELContext spELContext) { if (value instanceof Iterable) { @@ -118,44 +104,43 @@ private ReferenceContext computeReferenceContext(MongoPersistentProperty propert } if (value instanceof DBRef) { - return ReferenceContext.fromDBRef((DBRef) value); + return ReferenceCollection.fromDBRef((DBRef) value); } if (value instanceof Document) { Document ref = (Document) value; - if (property.isAnnotationPresent(DocumentReference.class)) { + if (property.isDocumentReference()) { ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); - DocumentReference documentReference = property.getRequiredAnnotation(DocumentReference.class); + DocumentReference documentReference = property.getDocumentReference(); String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> ref.get("db", String.class)); String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> ref.get("collection", mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection())); - return new ReferenceContext(targetDatabase, targetCollection); + return new ReferenceCollection(targetDatabase, targetCollection); } - return new ReferenceContext(ref.getString("db"), ref.get("collection", + return new ReferenceCollection(ref.getString("db"), ref.get("collection", mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection())); } - if (property.isAnnotationPresent(DocumentReference.class)) { + if (property.isDocumentReference()) { ParameterBindingContext bindingContext = bindingContext(property, value, spELContext); - DocumentReference documentReference = property.getRequiredAnnotation(DocumentReference.class); + DocumentReference documentReference = property.getDocumentReference(); String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null); 
String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection()); - Document sort = parseValueOrGet(documentReference.sort(), bindingContext, () -> null); - return new ReferenceContext(targetDatabase, targetCollection); + return new ReferenceCollection(targetDatabase, targetCollection); } - return new ReferenceContext(null, + return new ReferenceCollection(null, mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection()); } @@ -201,9 +186,9 @@ EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object return ctx; } - ReferenceFilter computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) { + DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) { - DocumentReference documentReference = property.getRequiredAnnotation(DocumentReference.class); + DocumentReference documentReference = property.getDocumentReference(); String lookup = documentReference.lookup(); Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext), () -> null); @@ -217,7 +202,7 @@ ReferenceFilter computeFilter(MongoPersistentProperty property, Object value, Sp ors.add(decoded); } - return new ListReferenceFilter(new Document("$or", ors), sort); + return new ListDocumentReferenceQuery(new Document("$or", ors), sort); } if (property.isMap() && value instanceof Map) { @@ -230,18 +215,18 @@ ReferenceFilter computeFilter(MongoPersistentProperty property, Object value, Sp filterMap.put(entry.getKey(), decoded); } - return new MapReferenceFilter(new Document("$or", filterMap.values()), sort, filterMap); + return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap); } - return new SingleReferenceFilter(codec.decode(lookup, bindingContext(property, value, 
spELContext)), sort); + return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, value, spELContext)), sort); } - static class SingleReferenceFilter implements ReferenceFilter { + static class SingleDocumentReferenceQuery implements DocumentReferenceQuery { Document filter; Document sort; - public SingleReferenceFilter(Document filter, Document sort) { + public SingleDocumentReferenceQuery(Document filter, Document sort) { this.filter = filter; this.sort = sort; } @@ -252,24 +237,24 @@ public Bson getFilter() { } @Override - public Stream apply(MongoCollection collection) { + public Iterable apply(MongoCollection collection) { Document result = collection.find(getFilter()).limit(1).first(); - return result != null ? Stream.of(result) : Stream.empty(); + return result != null ? Collections.singleton(result) : Collections.emptyList(); } } - static class MapReferenceFilter implements ReferenceFilter { + static class MapDocumentReferenceQuery implements DocumentReferenceQuery { - Document filter; - Document sort; - Map filterOrderMap; + private final Document filter; + private final Document sort; + private final Map filterOrderMap; - public MapReferenceFilter(Document filter, Document sort, Map filterOrderMap) { + public MapDocumentReferenceQuery(Document filter, Document sort, Map filterOrderMap) { this.filter = filter; - this.filterOrderMap = filterOrderMap; this.sort = sort; + this.filterOrderMap = filterOrderMap; } @Override @@ -283,45 +268,46 @@ public Bson getSort() { } @Override - public Stream restoreOrder(Stream stream) { + public Iterable restoreOrder(Iterable documents) { Map targetMap = new LinkedHashMap<>(); - List collected = stream.collect(Collectors.toList()); + List collected = documents instanceof List ? 
(List) documents + : Streamable.of(documents).toList(); for (Entry filterMapping : filterOrderMap.entrySet()) { - String key = filterMapping.getKey().toString(); - Optional first = collected.stream().filter(it -> { + Optional first = collected.stream() + .filter(it -> it.entrySet().containsAll(filterMapping.getValue().entrySet())).findFirst(); - boolean found = it.entrySet().containsAll(filterMapping.getValue().entrySet()); - return found; - }).findFirst(); - - targetMap.put(key, first.orElse(null)); + targetMap.put(filterMapping.getKey().toString(), first.orElse(null)); } - return Stream.of(new Document(targetMap)); + return Collections.singleton(new Document(targetMap)); } } - static class ListReferenceFilter implements ReferenceFilter { + static class ListDocumentReferenceQuery implements DocumentReferenceQuery { - Document filter; - Document sort; + private final Document filter; + private final Document sort; + + public ListDocumentReferenceQuery(Document filter, Document sort) { - public ListReferenceFilter(Document filter, Document sort) { this.filter = filter; this.sort = sort; } @Override - public Stream restoreOrder(Stream stream) { + public Iterable restoreOrder(Iterable documents) { if (filter.containsKey("$or")) { List ors = filter.get("$or", List.class); - return stream.sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)); + List target = documents instanceof List ? 
(List) documents + : Streamable.of(documents).toList(); + return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)) + .collect(Collectors.toList()); } - return stream; + return documents; } public Document getFilter() { @@ -347,7 +333,5 @@ int compareAgainstReferenceIndex(List referenceList, Document document } return referenceList.size(); } - } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java index 50bc6558d3..f29dc16a7c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -15,13 +15,12 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.function.BiFunction; -import java.util.stream.Stream; +import java.util.Collections; import org.bson.Document; -import org.bson.conversions.Bson; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import com.mongodb.DBRef; @@ -33,34 +32,38 @@ public interface ReferenceResolver { @Nullable Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, - BiFunction> lookupFunction); + LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction); - default Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader) { - return resolveReference(property, source, referenceReader, (ctx, filter) -> { + default Object 
resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, + ResultConversionFunction resultConversionFunction) { + + return resolveReference(property, source, referenceReader, (filter, ctx) -> { if (property.isCollectionLike() || property.isMap()) { return getReferenceLoader().bulkFetch(filter, ctx); + } + Object target = getReferenceLoader().fetch(filter, ctx); - return target == null ? Stream.empty() : Stream.of(getReferenceLoader().fetch(filter, ctx)); - }); + return target == null ? Collections.emptyList() : Collections.singleton(getReferenceLoader().fetch(filter, ctx)); + }, resultConversionFunction); } ReferenceLoader getReferenceLoader(); - // TODO: ReferenceCollection - class ReferenceContext { + class ReferenceCollection { - @Nullable final String database; - final String collection; + @Nullable + private final String database; + private final String collection; - public ReferenceContext(@Nullable String database, String collection) { + public ReferenceCollection(@Nullable String database, String collection) { this.database = database; this.collection = collection; } - static ReferenceContext fromDBRef(DBRef dbRef) { - return new ReferenceContext(dbRef.getDatabaseName(), dbRef.getCollectionName()); + static ReferenceCollection fromDBRef(DBRef dbRef) { + return new ReferenceCollection(dbRef.getDatabaseName(), dbRef.getCollectionName()); } public String getCollection() { @@ -72,4 +75,14 @@ public String getDatabase() { return database; } } + + @FunctionalInterface + interface LookupFunction { + Iterable apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection); + } + + @FunctionalInterface + interface ResultConversionFunction { + Object apply(Object source, TypeInformation property); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 0b47c79d04..b7b71a7fee 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -231,6 +231,15 @@ public boolean isDbReference() { return isAnnotationPresent(DBRef.class); } + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isDocumentReference() + */ + @Override + public boolean isDocumentReference() { + return isAnnotationPresent(DocumentReference.class); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDBRef() @@ -240,6 +249,16 @@ public DBRef getDBRef() { return findAnnotation(DBRef.class); } + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#getDocumentReference() + */ + @Nullable + @Override + public DocumentReference getDocumentReference() { + return findAnnotation(DocumentReference.class); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isLanguageProperty() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java similarity index 57% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java index 9904b20d3f..de7fbff866 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/ObjectReference.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java @@ -16,10 +16,19 @@ package org.springframework.data.mongodb.core.mapping; /** + * A custom pointer to a linked document to be used along with {@link DocumentReference} for storing the linkage value. + * * @author Christoph Strobl */ @FunctionalInterface -// TODO: ObjectPointer or DocumentPointer -public interface ObjectReference { +public interface DocumentPointer { + + /** + * The actual pointer value. This can be any simple type, like a {@link String} or {@link org.bson.types.ObjectId} or + * a {@link org.bson.Document} holding more information like the target collection, multiple fields forming the key, + * etc. + * + * @return the value stored in MongoDB and used for constructing the {@link DocumentReference#lookup() lookup query}. + */ T getPointer(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java index d9af6ccee1..0846c4022c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java @@ -24,8 +24,69 @@ import org.springframework.data.annotation.Reference; /** + * A {@link DocumentReference} offers an alternative way of linking entities in MongoDB. While the goal is the same as + * when using {@link DBRef}, the store representation is different and can be literally anything, a single value, an + * entire {@link org.bson.Document}, basically everything that can be stored in MongoDB. By default, the mapping layer + * will use the referenced entities {@literal id} value for storage and retrieval. + * + *
        + * public class Account {
        + *   private String id;
        + *   private Float total;
        + * }
        + *
        + * public class Person {
        + *   private String id;
        + *   @DocumentReference
        + *   private List<Account> accounts;
        + * }
        + * 
        + * Account account = ...
        + *
        + * mongoTemplate.insert(account);
        + *
        + * template.update(Person.class)
        + *   .matching(where("id").is(...))
        + *   .apply(new Update().push("accounts").value(account))
        + *   .first();
        + * 
        + * + * {@link #lookup()} allows to define custom queries that are independent from the {@literal id} field and in + * combination with {@link org.springframework.data.convert.WritingConverter writing converters} offer a flexible way of + * defining links between entities. + * + *
        + * public class Book {
        + * 	 private ObjectId id;
        + * 	 private String title;
        + *
        + * 	 @Field("publisher_ac")
        + * 	 @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }")
        + * 	 private Publisher publisher;
        + * }
        + *
        + * public class Publisher {
        + *
        + * 	 private ObjectId id;
        + * 	 private String acronym;
        + * 	 private String name;
        + *
        + * 	 @DocumentReference(lazy = true)
        + * 	 private List<Book> books;
        + * }
        + *
        + * @WritingConverter
        + * public class PublisherReferenceConverter implements Converter<Publisher, DocumentPointer<String>> {
        + *
        + *    public DocumentPointer<String> convert(Publisher source) {
        + * 		return () -> source.getAcronym();
        + *    }
        + * }
        + * 
        + * * @author Christoph Strobl * @since 3.3 + * @see MongoDB Reference Documentation */ @Documented @Retention(RetentionPolicy.RUNTIME) @@ -34,17 +95,38 @@ public @interface DocumentReference { /** - * The database the referred entity resides in. + * The database the linked entity resides in. * - * @return empty String by default. + * @return empty String by default. Uses the default database provided buy the {@link org.springframework.data.mongodb.MongoDatabaseFactory}. */ String db() default ""; + /** + * The database the linked entity resides in. + * + * @return empty String by default. Uses the property type for collection resolution. + */ String collection() default ""; + /** + * The single document lookup query. In case of an {@link java.util.Collection} or {@link java.util.Map} property + * the individual lookups are combined via an `$or` operator. + * + * @return an {@literal _id} based lookup. + */ String lookup() default "{ '_id' : ?#{#target} }"; + /** + * A specific sort. + * + * @return empty String by default. + */ String sort() default ""; + /** + * Controls whether the referenced entity should be loaded lazily. This defaults to {@literal false}. + * + * @return {@literal false} by default. 
+ */ boolean lazy() default false; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java index 7c347229b6..c753f3856d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java @@ -62,6 +62,15 @@ public interface MongoPersistentProperty extends PersistentProperty { - it.customConverters(new ReferencableConverter()); + it.customConverters(new ReferencableConverter(), new SimpleObjectRefWithReadingConverterToDocumentConverter(), + new DocumentToSimpleObjectRefWithReadingConverter()); }); cfg.configureMappingContext(it -> { @@ -84,7 +87,7 @@ public void setUp() { template.flushDatabase(); } - @Test + @Test // GH-3602 void writeSimpleTypeReference() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -102,12 +105,11 @@ void writeSimpleTypeReference() { assertThat(target.get("simpleValueRef")).isEqualTo("ref-1"); } - @Test + @Test // GH-3602 void writeMapTypeReference() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); - CollectionRefRoot source = new CollectionRefRoot(); source.id = "root-1"; source.mapValueRef = new LinkedHashMap<>(); @@ -120,11 +122,10 @@ void writeMapTypeReference() { return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); }); - System.out.println("target: " + target.toJson()); assertThat(target.get("mapValueRef", Map.class)).containsEntry("frodo", "ref-1").containsEntry("bilbo", "ref-2"); } - @Test + @Test // GH-3602 void writeCollectionOfSimpleTypeReference() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -143,7 +144,7 @@ void writeCollectionOfSimpleTypeReference() 
{ assertThat(target.get("simpleValueRef", List.class)).containsExactly("ref-1", "ref-2"); } - @Test + @Test // GH-3602 void writeObjectTypeReference() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -161,7 +162,7 @@ void writeObjectTypeReference() { assertThat(target.get("objectValueRef")).isEqualTo(source.getObjectValueRef().toReference()); } - @Test + @Test // GH-3602 void writeCollectionOfObjectTypeReference() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -181,7 +182,7 @@ void writeCollectionOfObjectTypeReference() { source.getObjectValueRef().get(0).toReference(), source.getObjectValueRef().get(1).toReference()); } - @Test + @Test // GH-3602 void readSimpleTypeObjectReference() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -200,7 +201,7 @@ void readSimpleTypeObjectReference() { assertThat(result.getSimpleValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); } - @Test + @Test // GH-3602 void readCollectionOfSimpleTypeObjectReference() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -220,7 +221,7 @@ void readCollectionOfSimpleTypeObjectReference() { assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); } - @Test + @Test // GH-3602 void readLazySimpleTypeObjectReference() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -245,7 +246,7 @@ void readLazySimpleTypeObjectReference() { assertThat(result.getSimpleLazyValueRef()).isEqualTo(new SimpleObjectRef("ref-1", "me-the-referenced-object")); } - @Test + @Test // GH-3602 void readSimpleTypeObjectReferenceFromFieldWithCustomName() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -266,7 +267,7 @@ void readSimpleTypeObjectReferenceFromFieldWithCustomName() { .isEqualTo(new SimpleObjectRef("ref-1", 
"me-the-referenced-object")); } - @Test + @Test // GH-3602 void readCollectionTypeObjectReferenceFromFieldWithCustomName() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -287,7 +288,7 @@ void readCollectionTypeObjectReferenceFromFieldWithCustomName() { .containsExactly(new SimpleObjectRef("ref-1", "me-the-referenced-object")); } - @Test + @Test // GH-3602 void readObjectReferenceFromDocumentType() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -307,7 +308,7 @@ void readObjectReferenceFromDocumentType() { assertThat(result.getObjectValueRef()).isEqualTo(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); } - @Test + @Test // GH-3602 void readCollectionObjectReferenceFromDocumentType() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -328,7 +329,7 @@ void readCollectionObjectReferenceFromDocumentType() { .containsExactly(new ObjectRefOfDocument("ref-1", "me-the-referenced-object")); } - @Test + @Test // GH-3602 void readObjectReferenceFromDocumentDeclaringCollectionName() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -351,7 +352,7 @@ void readObjectReferenceFromDocumentDeclaringCollectionName() { .isEqualTo(new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-referenced-object")); } - @Test + @Test // GH-3602 void readCollectionObjectReferenceFromDocumentDeclaringCollectionName() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -379,7 +380,7 @@ void readCollectionObjectReferenceFromDocumentDeclaringCollectionName() { new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-1-referenced-object")); } - @Test + @Test // GH-3602 void readObjectReferenceFromDocumentNotRelatingToTheIdProperty() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -401,7 +402,7 @@ void readObjectReferenceFromDocumentNotRelatingToTheIdProperty() { 
.isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); } - @Test + @Test // GH-3602 void readLazyObjectReferenceFromDocumentNotRelatingToTheIdProperty() { String rootCollectionName = template.getCollectionName(SingleRefRoot.class); @@ -429,7 +430,7 @@ void readLazyObjectReferenceFromDocumentNotRelatingToTheIdProperty() { .isEqualTo(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); } - @Test + @Test // GH-3602 void readCollectionObjectReferenceFromDocumentNotRelatingToTheIdProperty() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -452,7 +453,7 @@ void readCollectionObjectReferenceFromDocumentNotRelatingToTheIdProperty() { .containsExactly(new ObjectRefOnNonIdField("ref-1", "me-the-referenced-object", "ref-key-1", "ref-key-2")); } - @Test + @Test // GH-3602 void readMapOfReferences() { String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); @@ -479,12 +480,414 @@ void readMapOfReferences() { }); CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); - System.out.println("result: " + result); - assertThat(result.getMapValueRef()).containsEntry("frodo", - new SimpleObjectRef("ref-1", "me-the-1-referenced-object")) - .containsEntry("bilbo", - new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + assertThat(result.getMapValueRef()) + .containsEntry("frodo", new SimpleObjectRef("ref-1", "me-the-1-referenced-object")) + .containsEntry("bilbo", new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + } + + @Test // GH-3602 + void loadLazyCyclicReference() { + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.lazyToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + assertThat(loadedA).isNotNull(); + 
assertThat(loadedA.getToB()).isNotNull(); + LazyLoadingTestUtils.assertProxy(loadedA.getToB().lazyToA, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + } + + @Test // GH-3602 + void loadEagerCyclicReference() { + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.eagerToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + + assertThat(loadedA).isNotNull(); + assertThat(loadedA.getToB()).isNotNull(); + assertThat(loadedA.getToB().eagerToA).isSameAs(loadedA); + } + + @Test // GH-3602 + void loadAndStoreUnresolvedLazyDoesNotResolveTheProxy() { + + String collectionB = template.getCollectionName(WithRefB.class); + + WithRefA a = new WithRefA(); + a.id = "a"; + + WithRefB b = new WithRefB(); + b.id = "b"; + + a.toB = b; + b.lazyToA = a; + + template.save(a); + template.save(b); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("id").is(a.id)).firstValue(); + template.save(loadedA.getToB()); + + LazyLoadingTestUtils.assertProxy(loadedA.getToB().lazyToA, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + + Document target = template.execute(db -> { + return db.getCollection(collectionB).find(Filters.eq("_id", "b")).first(); + }); + assertThat(target.get("lazyToA", Object.class)).isEqualTo("a"); + } + + @Test // GH-3602 + void loadCollectionReferenceWithMissingRefs() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + + // ref-1 is missing. 
+ Document refSource = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Arrays.asList("ref-1", "ref-2")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); + } + + @Test // GH-3602 + void queryForReference() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + a.toB = b; + template.save(a); + + WithRefA a2 = new WithRefA(); + a2.id = "a2"; + template.save(a2); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("toB").is(b)).firstValue(); + assertThat(loadedA.getId()).isEqualTo(a.getId()); + } + + @Test // GH-3602 + void queryForReferenceInCollection() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + Document shouldBeFound = new Document("_id", "id-1").append("value", "v1").append("simpleValueRef", + Arrays.asList("ref-1", "ref-2")); + Document shouldNotBeFound = new Document("_id", "id-2").append("value", "v2").append("simpleValueRef", + Arrays.asList("ref-1")); + + template.execute(db -> { + + db.getCollection(rootCollectionName).insertOne(shouldBeFound); + db.getCollection(rootCollectionName).insertOne(shouldNotBeFound); + return null; + }); + + SimpleObjectRef objectRef = new SimpleObjectRef("ref-2", "some irrelevant value"); + + List loaded = template.query(CollectionRefRoot.class) + .matching(where("simpleValueRef").in(objectRef)).all(); + assertThat(loaded).map(CollectionRefRoot::getId).containsExactly("id-1"); + } + + @Test // GH-3602 + void queryForReferenceOnIdField() { + + 
WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + a.toB = b; + template.save(a); + + WithRefA a2 = new WithRefA(); + a2.id = "a2"; + template.save(a2); + + WithRefA loadedA = template.query(WithRefA.class).matching(where("toB.id").is(b.id)).firstValue(); + assertThat(loadedA.getId()).isEqualTo(a.getId()); + } + + @Test // GH-3602 + void updateReferenceWithEntityHavingPointerConversion() { + + WithRefB b = new WithRefB(); + b.id = "b"; + template.save(b); + + WithRefA a = new WithRefA(); + a.id = "a"; + template.save(a); + + template.update(WithRefA.class).apply(new Update().set("toB", b)).first(); + + String collectionA = template.getCollectionName(WithRefA.class); + + Document target = template.execute(db -> { + return db.getCollection(collectionA).find(Filters.eq("_id", "a")).first(); + }); + + assertThat(target).containsEntry("toB", "b"); + } + + @Test // GH-3602 + void updateReferenceWithEntityWithoutPointerConversion() { + + String collectionName = template.getCollectionName(SingleRefRoot.class); + SingleRefRoot refRoot = new SingleRefRoot(); + refRoot.id = "root-1"; + + SimpleObjectRef ref = new SimpleObjectRef("ref-1", "me the referenced object"); + + template.save(refRoot); + + template.update(SingleRefRoot.class).apply(new Update().set("simpleValueRef", ref)).first(); + + Document target = template.execute(db -> { + return db.getCollection(collectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", "ref-1"); + } + + @Test // GH-3602 + void updateReferenceWithValue() { + + WithRefA a = new WithRefA(); + a.id = "a"; + template.save(a); + + template.update(WithRefA.class).apply(new Update().set("toB", "b")).first(); + + String collectionA = template.getCollectionName(WithRefA.class); + + Document target = template.execute(db -> { + return db.getCollection(collectionA).find(Filters.eq("_id", "a")).first(); + }); + + 
assertThat(target).containsEntry("toB", "b"); + } + + @Test // GH-3602 + void updateReferenceCollectionWithEntity() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.simpleValueRef = Collections.singletonList(new SimpleObjectRef("ref-1", "beastie")); + + template.save(root); + + template.update(CollectionRefRoot.class) + .apply(new Update().push("simpleValueRef").value(new SimpleObjectRef("ref-2", "boys"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", Arrays.asList("ref-1", "ref-2")); + } + + @Test // GH-3602 + void updateReferenceCollectionWithValue() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.simpleValueRef = Collections.singletonList(new SimpleObjectRef("ref-1", "beastie")); + + template.save(root); + + template.update(CollectionRefRoot.class).apply(new Update().push("simpleValueRef").value("ref-2")).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("simpleValueRef", Arrays.asList("ref-1", "ref-2")); + } + + @Test // GH-3602 + @Disabled("Property path resolution does not work inside maps, the key is considered :/") + void updateReferenceMapWithEntity() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.mapValueRef = Collections.singletonMap("beastie", new SimpleObjectRef("ref-1", "boys")); + + template.save(root); + + template.update(CollectionRefRoot.class) + .apply(new Update().set("mapValueRef.rise", new 
SimpleObjectRef("ref-2", "against"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("mapValueRef", new Document("beastie", "ref-1").append("rise", "ref-2")); + } + + @Test // GH-3602 + void updateReferenceMapWithValue() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot root = new CollectionRefRoot(); + root.id = "root-1"; + root.mapValueRef = Collections.singletonMap("beastie", new SimpleObjectRef("ref-1", "boys")); + + template.save(root); + + template.update(CollectionRefRoot.class).apply(new Update().set("mapValueRef.rise", "ref-2")).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("mapValueRef", new Document("beastie", "ref-1").append("rise", "ref-2")); + } + + @Test // GH-3602 + void useReadingWriterConverterPairForLoading() { + + SingleRefRoot root = new SingleRefRoot(); + root.id = "root-1"; + root.withReadingConverter = new SimpleObjectRefWithReadingConverter("ref-1", "value-1"); + + template.save(root.withReadingConverter); + + template.save(root); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(SingleRefRoot.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("withReadingConverter", + new Document("ref-key-from-custom-write-converter", root.withReadingConverter.id)); + + SingleRefRoot loaded = template.findOne(query(where("id").is(root.id)), SingleRefRoot.class); + assertThat(loaded.withReadingConverter).isInstanceOf(SimpleObjectRefWithReadingConverter.class); + } + + @Test // GH-3602 + void deriveMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name 
= "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + book.publisher = publisher; + + template.save(book); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first(); + }); + + assertThat(target).containsEntry("publisher", new Document("acc", publisher.acronym).append("n", publisher.name)); + + Book result = template.findOne(query(where("id").is(book.id)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void updateDerivedMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + + template.save(book); + + template.update(Book.class).matching(where("id").is(book.id)).apply(new Update().set("publisher", publisher)).first(); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first(); + }); + + assertThat(target).containsEntry("publisher", new Document("acc", publisher.acronym).append("n", publisher.name)); + + Book result = template.findOne(query(where("id").is(book.id)), Book.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void queryDerivedMappingFromLookup() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + Book book = new Book(); + book.id = "book-1"; + book.publisher = publisher; + + template.save(book); + book.publisher = publisher; + + Book result = template.findOne(query(where("publisher").is(publisher)), Book.class); + assertThat(result.publisher).isNotNull(); } @Data @@ -556,16 +959,16 @@ static class SimpleObjectRef { @Id String id; String 
value; - } @Getter @Setter static class SimpleObjectRefWithReadingConverter extends SimpleObjectRef { - public SimpleObjectRefWithReadingConverter(String id, String value, String id1, String value1) { + public SimpleObjectRefWithReadingConverter(String id, String value) { super(id, value); } + } @Data @@ -609,41 +1012,94 @@ public Object toReference() { } } - static class ReferencableConverter implements Converter { + static class ReferencableConverter implements Converter { @Nullable @Override - public ObjectReference convert(ReferenceAble source) { + public DocumentPointer convert(ReferenceAble source) { return source::toReference; } } @WritingConverter class DocumentToSimpleObjectRefWithReadingConverter - implements Converter, SimpleObjectRefWithReadingConverter> { + implements Converter, SimpleObjectRefWithReadingConverter> { - private final MongoTemplate template; + @Nullable + @Override + public SimpleObjectRefWithReadingConverter convert(DocumentPointer source) { - public DocumentToSimpleObjectRefWithReadingConverter(MongoTemplate template) { - this.template = template; + Document document = client.getDatabase(DB_NAME).getCollection("simple-object-ref") + .find(Filters.eq("_id", source.getPointer().get("ref-key-from-custom-write-converter"))).first(); + return new SimpleObjectRefWithReadingConverter(document.getString("_id"), document.getString("value")); } + } + + @WritingConverter + class SimpleObjectRefWithReadingConverterToDocumentConverter + implements Converter> { @Nullable @Override - public SimpleObjectRefWithReadingConverter convert(ObjectReference source) { - return template.findOne(query(where("id").is(source.getPointer().get("the-ref-key-you-did-not-expect"))), - SimpleObjectRefWithReadingConverter.class); + public DocumentPointer convert(SimpleObjectRefWithReadingConverter source) { + return () -> new Document("ref-key-from-custom-write-converter", source.getId()); } } - @WritingConverter - class 
SimpleObjectRefWithReadingConverterToDocumentConverter - implements Converter> { + @Getter + @Setter + static class WithRefA/* to B */ implements ReferenceAble { + + @Id String id; + @DocumentReference WithRefB toB; + + @Override + public Object toReference() { + return id; + } + } + + @Getter + @Setter + @ToString + static class WithRefB/* to A */ implements ReferenceAble { + + @Id String id; + @DocumentReference(lazy = true) WithRefA lazyToA; + + @DocumentReference WithRefA eagerToA; + + @Override + public Object toReference() { + return id; + } + } + + static class ReferencedObject {} + + class ToDocumentPointerConverter implements Converter> { @Nullable @Override - public ObjectReference convert(SimpleObjectRefWithReadingConverter source) { - return () -> new Document("the-ref-key-you-did-not-expect", source.getId()); + public DocumentPointer convert(ReferencedObject source) { + return () -> new Document("", source); } } + + @Data + static class Book { + + String id; + + @DocumentReference(lookup = "{ 'acronym' : ?#{acc}, 'name' : ?#{n} }") Publisher publisher; + + } + + static class Publisher { + + String id; + String acronym; + String name; + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java index c0a6b8df90..d7a2870477 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java @@ -33,6 +33,7 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; + import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mongodb.MongoDatabaseFactory; import 
org.springframework.data.mongodb.core.DocumentTestUtils; @@ -64,8 +65,6 @@ void setUp() { when(factoryMock.getMongoDatabase()).thenReturn(dbMock); when(dbMock.getCollection(anyString(), any(Class.class))).thenReturn(collectionMock); when(collectionMock.find(any(Document.class))).thenReturn(cursorMock); - when(cursorMock.sort(any(Document.class))).thenReturn(cursorMock); - when(cursorMock.spliterator()).thenReturn(Collections. emptyList().spliterator()); resolver = new DefaultDbRefResolver(factoryMock); } @@ -116,7 +115,7 @@ void bulkFetchShouldRestoreOriginalOrder() { DBRef ref1 = new DBRef("collection-1", o1.get("_id")); DBRef ref2 = new DBRef("collection-1", o2.get("_id")); - when(cursorMock.spliterator()).thenReturn(Arrays.asList(o2, o1).spliterator()); + when(cursorMock.into(any())).then(invocation -> Arrays.asList(o2, o1)); assertThat(resolver.bulkFetch(Arrays.asList(ref1, ref2))).containsExactly(o1, o2); } @@ -129,7 +128,7 @@ void bulkFetchContainsDuplicates() { DBRef ref1 = new DBRef("collection-1", document.get("_id")); DBRef ref2 = new DBRef("collection-1", document.get("_id")); - when(cursorMock.spliterator()).thenReturn(Arrays.asList(document).spliterator()); + when(cursorMock.into(any())).then(invocation -> Arrays.asList(document)); assertThat(resolver.bulkFetch(Arrays.asList(ref1, ref2))).containsExactly(document, document); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index e2f69260b1..9c157db759 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -48,6 +48,7 @@ import org.springframework.data.mongodb.core.geo.GeoJsonPolygon; import org.springframework.data.mongodb.core.mapping.DBRef; import 
org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.FieldType; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; @@ -1487,4 +1488,32 @@ static class WithPropertyUsingUnderscoreInName { @Field("renamed") String renamed_fieldname_with_underscores; } + + static class WithDocumentReferences { + + @DocumentReference + Sample sample; + + @DocumentReference + SimpeEntityWithoutId noId; + + @DocumentReference(lookup = "{ 'stringProperty' : ?#{stringProperty} }") + SimpeEntityWithoutId noIdButLookupQuery; + + } + + @Test + void xxx() { + + Sample sample = new Sample(); + sample.foo = "sample-id"; + + Query query = query(where("sample").is(sample)); + + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(WithDocumentReferences.class)); + + System.out.println("mappedObject.toJson(): " + mappedObject.toJson()); + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java index 9aa1bb0b57..b70930dae0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java @@ -18,11 +18,10 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import org.bson.conversions.Bson; import org.springframework.data.mongodb.core.convert.ReferenceLoader; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.ReferenceFilter; +import 
org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; import org.springframework.data.mongodb.core.convert.ReferenceReader; -import org.springframework.data.util.Streamable; +import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @@ -107,7 +106,7 @@ public void setUp() throws Exception { @Nullable @Override - public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, BiFunction> lookupFunction) { + public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction) { return null; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java index 9ab37e3ff5..61caa30560 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java @@ -1434,4 +1434,19 @@ void annotatedQueryShouldAllowAggregationInProjection() { Person target = repository.findWithAggregationInProjection(alicia.getId()); assertThat(target.getFirstname()).isEqualTo(alicia.getFirstname().toUpperCase()); } + + @Test // GH-3602 + void executesQueryWithDocumentReferenceCorrectly() { + + Person josh = new Person("Josh", "Long"); + User dave = new User(); + dave.id = "dave"; + + josh.setSpiritAnimal(dave); + + operations.save(josh); + + List result = repository.findBySpiritAnimal(dave); + assertThat(result).map(Person::getId).containsExactly(josh.getId()); + } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java index 01b0c28de2..62c5b18be5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/Person.java @@ -27,6 +27,7 @@ import org.springframework.data.mongodb.core.index.Indexed; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.Unwrapped; @@ -74,6 +75,9 @@ public enum Sex { @Unwrapped.Nullable(prefix = "u") // User unwrappedUser; + @DocumentReference + User spiritAnimal; + public Person() { this(null, null); @@ -308,6 +312,14 @@ public void setUnwrappedUser(User unwrappedUser) { this.unwrappedUser = unwrappedUser; } + public User getSpiritAnimal() { + return spiritAnimal; + } + + public void setSpiritAnimal(User spiritAnimal) { + this.spiritAnimal = spiritAnimal; + } + /* * (non-Javadoc) * diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java index 314655e781..ca382fa2ca 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java @@ -416,4 +416,6 @@ Person findPersonByManyArguments(String firstname, String lastname, String email List findByUnwrappedUserUsername(String username); List findByUnwrappedUser(User user); + + List findBySpiritAnimal(User user); } diff --git 
a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index 03d18bacf9..842dd8341b 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -1,6 +1,11 @@ [[new-features]] = New & Noteworthy +[[new-features.3.3]] +== What's New in Spring Data MongoDB 3.3 + +* Extended support for <> entities. + [[new-features.3.2]] == What's New in Spring Data MongoDB 3.2 diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index 82b5632f2b..1998fe1ad8 100644 --- a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -480,6 +480,7 @@ The MappingMongoConverter can use metadata to drive the mapping of objects to do * `@MongoId`: Applied at the field level to mark the field used for identity purpose. Accepts an optional `FieldType` to customize id conversion. * `@Document`: Applied at the class level to indicate this class is a candidate for mapping to the database. You can specify the name of the collection where the data will be stored. * `@DBRef`: Applied at the field to indicate it is to be stored using a com.mongodb.DBRef. +* `@DocumentReference`: Applied at the field to indicate it is to be stored as a pointer to another document. This can be a single value (the _id_ by default), or a `Document` provided via a converter. * `@Indexed`: Applied at the field level to describe how to index the field. * `@CompoundIndex` (repeatable): Applied at the type level to declare Compound Indexes. * `@GeoSpatialIndexed`: Applied at the field level to describe how to geoindex the field. @@ -826,6 +827,370 @@ Required properties that are also defined as lazy loading ``DBRef`` and used as TIP: Lazily loaded ``DBRef``s can be hard to debug. Make sure tooling does not accidentally trigger proxy resolution by eg. calling `toString()` or some inline debug rendering invoking property getters. 
Please consider to enable _trace_ logging for `org.springframework.data.mongodb.core.convert.DefaultDbRefResolver` to gain insight on `DBRef` resolution. +[[mapping-usage.linking]] +=== Using Document References + +Using `@DocumentReference` offers an alternative way of linking entities in MongoDB. +While the goal is the same as when using <>, the store representation is different. +`DBRef` resolves to a document with a fixed structure as outlined in the https://docs.mongodb.com/manual/reference/database-references/[MongoDB Reference documentation]. + +Document references, do not follow a specific format. +They can be literally anything, a single value, an entire document, basically everything that can be stored in MongoDB. +By default, the mapping layer will use the referenced entities _id_ value for storage and retrieval, like in the sample below. + +==== +[source,java] +---- +@Document +public class Account { + + @Id + private String id; + private Float total; +} + +@Document +public class Person { + + @Id + private String id; + + @DocumentReference <1> + private List accounts; +} +---- +[source,java] +---- +Account account = ... + +tempate.insert(account); <2> + +template.update(Person.class) + .matching(where("id").is(...)) + .apply(new Update().push("accounts").value(account)) <3> + .first(); +---- +[source,json] +---- +{ + "_id" : ..., + "accounts" : [ "6509b9e", ... ] <4> +} +---- +<1> Mark the collection of `Account` values to be linked. +<2> The mapping framework does not handle cascading saves, so make sure to persist the referenced entity individually. +<3> Add the reference to the existing entity. +<4> Linked `Account` entities are represented as an array of their `_id` values. +==== + +The sample above uses an `_id` based fetch query (`{ '_id' : ?#{#target} }`) for data retrieval and resolves linked entities eagerly. 
+It is possible to alter resolution defaults (listed below) via the attributes of `@DocumentReference` + +.@DocumentReference defaults +[cols="2,3,5", options="header"] +|=== +| Attribute | Description | Default + +| `db` +| The target database name for collection lookup. +| The configured database provided by `MongoDatabaseFactory.getMongoDatabase()`. + +| `collection` +| The target collection name. +| The annotated properties domain type, respectively the value type in case of `Collection` like or `Map` properties, collection name. + +| `lookup` +| The single document lookup query evaluating placeholders via SpEL expressions using `#target` as the marker for a given source value. `Collection` like or `Map` properties combine individual lookups via an `$or` operator. +| An `_id` field based query (`{ '_id' : ?#{#target} }`) using the loaded source value. + +| `lazy` +| If set to `true` value resolution is delayed upon first access of the property. +| Resolves properties eagerly by default. +|=== + +`@DocumentReference(lookup=...)` allows to define custom queries that are independent from the `_id` field and therefore offer a flexible way of defining links between entities as demonstrated in the sample below, where the `Publisher` of a book is referenced by its acronym instead of the internal `id`. + +==== +[source,java] +---- +@Document +public class Book { + + @Id + private ObjectId id; + private String title; + private List author; + + @Field("publisher_ac") + @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") <1> + private Publisher publisher; +} + +@Document +public class Publisher { + + @Id + private ObjectId id; + private String acronym; <1> + private String name; + + @DocumentReference(lazy = true) <2> + private List books; + +} +---- +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. 
Brett"], + "publisher_ac" : "DR" +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> Lazy load back references to the `Book` collection. +==== + +The above snipped shows the reading side of things when working with custom linked objects. +To make the writing part aware of the modified document pointer a custom converter, capable of the transformation into a `DocumentPointer`, like the one below, needs to be registered. + +==== +[source,java] +---- +@WritingConverter +class PublisherReferenceConverter implements Converter> { + + @Override + public DocumentPointer convert(Publisher source) { + return () -> source.getAcronym(); + } +} +---- +==== + +If no `DocumentPointer` converter is provided the target linkage document can be computed based on the given lookup query. +In this case the association target properties are evaluated as shown in the following sample. + +==== +[source,java] +---- +@Document +public class Book { + + @Id + private ObjectId id; + private String title; + private List author; + + @DocumentReference(lookup = "{ 'acronym' : ?#{acc} }") <1> <2> + private Publisher publisher; +} + +@Document +public class Publisher { + + @Id + private ObjectId id; + private String acronym; <1> + private String name; + + // ... +} +---- +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisher" : { + "acc" : "DOC" + } +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> The field value placeholders of the lookup query (like `acc`) is used to form the linkage document. +==== + +With all the above in place it is possible to model all kind of associations between entities. +Have a look at the non exhaustive list of samples below to get feeling for what is possible. 
+ +.Simple Document Reference using _id_ field +==== +[source,java] +---- +class Entity { + @DocumentReference + private ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : "9a48e32" <1> +} + +// referenced object +{ + "_id" : "9a48e32" <1> +} +---- +<1> MongoDB simple type can be directly used without further configuration. +==== + +.Simple Document Reference using _id_ field with explicit lookup query +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") <1> + private ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : "9a48e32" <1> +} + +// referenced object +{ + "_id" : "9a48e32" +} +---- +<1> _target_ defines the linkage value itself. +==== + +.Document Reference extracting field of linkage document for lookup query +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ '_id' : '?#{refKey}' }") <1> <2> + private ReferencedObject ref; +} +---- + +[source,java] +---- +@WritingConverter +class ToDocumentPointerConverter implements Converter> { + public DocumentPointer convert(ReferencedObject source) { + return () -> new Document("refKey", source.id); <1> + } +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : { + "refKey" : "9a48e32" <1> + } +} + +// referenced object +{ + "_id" : "9a48e32" +} +---- +<1> The key used for obtaining the linkage value must be the one used during write. +<2> `refKey` is short for `target.refKey`. 
+==== + +.Document Reference with multiple values forming the lookup query +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ 'firstname' : '?#{fn}', 'lastname' : '?#{ln}' }") <1> <2> + private ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : { + "fn" : "Josh", <1> + "ln" : "Long" <1> + } +} + +// referenced object +{ + "_id" : "9a48e32", + "firsntame" : "Josh", <2> + "lastname" : "Long", <2> +} +---- +<1> Read/wirte the keys `fn` & `ln` from/to the linkage document based on the lookup query. +<2> Use non _id_ fields for the lookup of the target documents. +==== + +.Document Reference reading target collection from linkage document +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") <2> + private ReferencedObject ref; +} +---- + +[source,java] +---- +@WritingConverter +class ToDocumentPointerConverter implements Converter> { + public DocumentPointer convert(ReferencedObject source) { + return () -> new Document("id", source.id) <1> + .append("collection", ... ); <2> + } +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : { + "id" : "9a48e32", <1> + "collection" : "..." <2> + } +} +---- +<1> Read/wirte the keys `_id` from/to the linkage document to use them in the lookup query. +<2> The collection name can be read from the linkage document via its key. +==== + +[WARNING] +==== +We know it is tempting to use all kinds of MongoDB query operators in the lookup query and this is fine. But: + +* Make sure to have indexes in place that support your lookup. +* Mind that resolution takes time and consider a lazy strategy. +* A collection of document references is bulk loaded using an `$or` operator. + +The original element order is restored in memory which cannot be done when using MongoDB query operators. +In this case Results will be ordered as they are received from the store. 
+ +And a few more general remarks: + +* Cyclic references? Ask your self if you need them. +* Lazy document references are hard to debug. Make sure tooling does not accidentally trigger proxy resolution by eg. calling `toString()`. +* There is no support for reading document references via the reactive bits Spring Data MongoDB offers. +==== + [[mapping-usage-events]] === Mapping Framework Events From 82af678caba783997f920d2fe00e0309f5667e9d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 18 May 2021 10:43:13 +0200 Subject: [PATCH 018/983] Polishing Rename ReferenceReader to ReferenceLookupDelegate. Rename LazyLoadingProxyGenerator to LazyLoadingProxyFactory. Rename DefaultReferenceLoader to MongoDatabaseFactoryReferenceLoader. Reduce scope of LookupFunction and move it to ReferenceLookupDelegate. Extract some checks into methods to reflect the underlying concepts. Simplify code, convert variables to constants where possible. Original pull request: #3647. Closes #3602. --- .../core/convert/DefaultDbRefResolver.java | 9 ++- .../convert/DefaultReferenceResolver.java | 32 +++++--- .../core/convert/DocumentPointerFactory.java | 39 +++++++--- ...ator.java => LazyLoadingProxyFactory.java} | 58 +++++++------- .../core/convert/MappingMongoConverter.java | 38 ++++++---- ... 
MongoDatabaseFactoryReferenceLoader.java} | 8 +- .../core/convert/NoOpDbRefResolver.java | 10 +-- .../mongodb/core/convert/ReferenceLoader.java | 30 +++++--- ...ader.java => ReferenceLookupDelegate.java} | 76 +++++++++++-------- .../core/convert/ReferenceResolver.java | 33 ++------ .../mapping/BasicMongoPersistentProperty.java | 2 +- .../core/convert/LazyLoadingTestUtils.java | 7 +- .../core/convert/QueryMapperUnitTests.java | 9 ++- .../BasicMongoPersistentEntityUnitTests.java | 6 +- .../performance/ReactivePerformanceTests.java | 13 ++-- 15 files changed, 204 insertions(+), 166 deletions(-) rename spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/{LazyLoadingProxyGenerator.java => LazyLoadingProxyFactory.java} (81%) rename spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/{DefaultReferenceLoader.java => MongoDatabaseFactoryReferenceLoader.java} (85%) rename spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/{ReferenceReader.java => ReferenceLookupDelegate.java} (84%) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java index 5277fbc0b0..f64c7f0f06 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java @@ -47,6 +47,7 @@ import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.MongoDatabaseUtils; import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; 
import org.springframework.objenesis.ObjenesisStd; @@ -83,7 +84,7 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db */ public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) { - super(new DefaultReferenceLoader(mongoDbFactory)); + super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory)); Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); @@ -117,7 +118,7 @@ public Object resolveDbRef(MongoPersistentProperty property, @Nullable DBRef dbr */ @Override public Document fetch(DBRef dbRef) { - return getReferenceLoader().fetch(DocumentReferenceQuery.singleReferenceFilter(Filters.eq("_id", dbRef.getId())), + return getReferenceLoader().fetchOne(DocumentReferenceQuery.forSingleDocument(Filters.eq("_id", dbRef.getId())), ReferenceCollection.fromDBRef(dbRef)); } @@ -159,7 +160,7 @@ public List bulkFetch(List refs) { } List result = mongoCollection // - .find(new Document("_id", new Document("$in", ids))) // + .find(new Document(BasicMongoPersistentProperty.ID_FIELD_NAME, new Document("$in", ids))) // .into(new ArrayList<>()); return ids.stream() // @@ -239,7 +240,7 @@ private boolean isLazyDbRef(MongoPersistentProperty property) { private static Stream documentWithId(Object identifier, Collection documents) { return documents.stream() // - .filter(it -> it.get("_id").equals(identifier)) // + .filter(it -> it.get(BasicMongoPersistentProperty.ID_FIELD_NAME).equals(identifier)) // .limit(1); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java index 0692f719b5..7e38b6995d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -15,7 
+15,10 @@ */ package org.springframework.data.mongodb.core.convert; -import org.springframework.data.mongodb.core.mapping.DocumentReference; +import static org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*; + +import java.util.Collections; + import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; @@ -37,21 +40,32 @@ public ReferenceLoader getReferenceLoader() { @Nullable @Override - public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, - LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction) { + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { + + LookupFunction lookupFunction = (filter, ctx) -> { + if (property.isCollectionLike() || property.isMap()) { + return getReferenceLoader().fetchMany(filter, ctx); + + } + + Object target = getReferenceLoader().fetchOne(filter, ctx); + return target == null ? 
Collections.emptyList() + : Collections.singleton(getReferenceLoader().fetchOne(filter, ctx)); + }; if (isLazyReference(property)) { - return createLazyLoadingProxy(property, source, referenceReader, lookupFunction, resultConversionFunction); + return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader); } - return referenceReader.readReference(property, source, lookupFunction, resultConversionFunction); + return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader); } private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, - ReferenceReader referenceReader, LookupFunction lookupFunction, - ResultConversionFunction resultConversionFunction) { - return new LazyLoadingProxyGenerator(referenceReader).createLazyLoadingProxy(property, source, lookupFunction, - resultConversionFunction); + ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, + MongoEntityReader entityReader) { + return new LazyLoadingProxyFactory(referenceLookupDelegate).createLazyLoadingProxy(property, source, lookupFunction, + entityReader); } protected boolean isLazyReference(MongoPersistentProperty property) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java index a91a48d922..8e9554b6ba 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java @@ -17,12 +17,14 @@ import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Locale; import java.util.Map; import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.bson.Document; + import 
org.springframework.core.convert.ConversionService; import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.context.MappingContext; @@ -37,9 +39,9 @@ */ class DocumentPointerFactory { - private ConversionService conversionService; - private MappingContext, MongoPersistentProperty> mappingContext; - private Map linkageMap; + private final ConversionService conversionService; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final Map linkageMap; public DocumentPointerFactory(ConversionService conversionService, MappingContext, MongoPersistentProperty> mappingContext) { @@ -60,15 +62,24 @@ public DocumentPointer computePointer(MongoPersistentProperty property, Objec } else { MongoPersistentEntity persistentEntity = mappingContext - .getPersistentEntity(property.getAssociationTargetType()); + .getRequiredPersistentEntity(property.getAssociationTargetType()); - if (!property.getDocumentReference().lookup().toLowerCase().replaceAll("\\s", "").replaceAll("'", "") + // TODO: Extract method + if (!property.getDocumentReference().lookup().toLowerCase(Locale.ROOT).replaceAll("\\s", "").replaceAll("'", "") .equals("{_id:?#{#target}}")) { - return () -> linkageMap.computeIfAbsent(property.getDocumentReference().lookup(), key -> { - return new LinkageDocument(key); - }).get(persistentEntity, - BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value)); + MongoPersistentEntity valueEntity = mappingContext.getPersistentEntity(value.getClass()); + PersistentPropertyAccessor propertyAccessor; + if (valueEntity == null) { + propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), + value); + } else { + propertyAccessor = valueEntity.getPropertyAccessor(value); + + } + + return () -> linkageMap.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::new) + .get(persistentEntity, propertyAccessor); } // 
just take the id as a reference @@ -78,6 +89,8 @@ public DocumentPointer computePointer(MongoPersistentProperty property, Objec static class LinkageDocument { + static final Pattern pattern = Pattern.compile("\\?#\\{#?[\\w\\d]*\\}"); + String lookup; org.bson.Document fetchDocument; Map mapMap; @@ -87,16 +100,18 @@ public LinkageDocument(String lookup) { this.lookup = lookup; String targetLookup = lookup; - Pattern pattern = Pattern.compile("\\?#\\{#?[\\w\\d]*\\}"); Matcher matcher = pattern.matcher(lookup); int index = 0; mapMap = new LinkedHashMap<>(); + + // TODO: Make explicit what's happening here while (matcher.find()) { String expr = matcher.group(); - mapMap.put(Integer.valueOf(index), expr.substring(0, expr.length() - 1).replace("?#{#", "").replace("?#{", "") - .replace("target.", "").replaceAll("'", "")); + String sanitized = expr.substring(0, expr.length() - 1).replace("?#{#", "").replace("?#{", "") + .replace("target.", "").replaceAll("'", ""); + mapMap.put(index, sanitized); targetLookup = targetLookup.replace(expr, index + ""); index++; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java similarity index 81% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java index 570a516d9b..8c2156df2e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyGenerator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java @@ -15,47 +15,46 @@ */ package org.springframework.data.mongodb.core.convert; +import static 
org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*; import static org.springframework.util.ReflectionUtils.*; import java.io.Serializable; import java.lang.reflect.Method; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; + import org.springframework.aop.framework.ProxyFactory; import org.springframework.cglib.proxy.Callback; import org.springframework.cglib.proxy.Enhancer; import org.springframework.cglib.proxy.Factory; import org.springframework.cglib.proxy.MethodProxy; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.LookupFunction; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.ResultConversionFunction; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.lang.Nullable; import org.springframework.objenesis.ObjenesisStd; import org.springframework.util.ReflectionUtils; /** * @author Christoph Strobl */ -class LazyLoadingProxyGenerator { +class LazyLoadingProxyFactory { private final ObjenesisStd objenesis; - private final ReferenceReader referenceReader; + private final ReferenceLookupDelegate lookupDelegate; - public LazyLoadingProxyGenerator(ReferenceReader referenceReader) { + public LazyLoadingProxyFactory(ReferenceLookupDelegate lookupDelegate) { - this.referenceReader = referenceReader; + this.lookupDelegate = lookupDelegate; this.objenesis = new ObjenesisStd(true); } public Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, LookupFunction lookupFunction, - ResultConversionFunction resultConversionFunction) { + MongoEntityReader entityReader) { Class propertyType = property.getType(); - LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, source, referenceReader, 
lookupFunction, - resultConversionFunction); + LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, source, lookupDelegate, lookupFunction, + entityReader); if (!propertyType.isInterface()) { @@ -97,13 +96,13 @@ private Class getEnhancedTypeFor(Class type) { public static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable { - private final ReferenceReader referenceReader; - MongoPersistentProperty property; + private final ReferenceLookupDelegate referenceLookupDelegate; + private final MongoPersistentProperty property; private volatile boolean resolved; - private @org.springframework.lang.Nullable Object result; - private Object source; - private LookupFunction lookupFunction; - private ResultConversionFunction resultConversionFunction; + private @Nullable Object result; + private final Object source; + private final LookupFunction lookupFunction; + private final MongoEntityReader entityReader; private final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD; @@ -118,22 +117,23 @@ public static class LazyLoadingInterceptor } } - public LazyLoadingInterceptor(MongoPersistentProperty property, Object source, ReferenceReader reader, - LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction) { + public LazyLoadingInterceptor(MongoPersistentProperty property, Object source, ReferenceLookupDelegate reader, + LookupFunction lookupFunction, MongoEntityReader entityReader) { this.property = property; this.source = source; - this.referenceReader = reader; + this.referenceLookupDelegate = reader; this.lookupFunction = lookupFunction; - this.resultConversionFunction = resultConversionFunction; + this.entityReader = entityReader; } @Nullable @Override - public Object invoke(@Nonnull MethodInvocation invocation) throws Throwable { + public Object invoke(MethodInvocation invocation) throws Throwable { return 
intercept(invocation.getThis(), invocation.getMethod(), invocation.getArguments(), null); } + @Nullable @Override public Object intercept(Object o, Method method, Object[] args, MethodProxy proxy) throws Throwable { @@ -180,6 +180,7 @@ public Object intercept(Object o, Method method, Object[] args, MethodProxy prox return method.invoke(target, args); } + @Nullable private Object ensureResolved() { if (!resolved) { @@ -190,7 +191,7 @@ private Object ensureResolved() { return this.result; } - private String proxyToString(Object source) { + private String proxyToString(@Nullable Object source) { StringBuilder description = new StringBuilder(); if (source != null) { @@ -203,7 +204,7 @@ private String proxyToString(Object source) { return description.toString(); } - private boolean proxyEquals(@org.springframework.lang.Nullable Object proxy, Object that) { + private boolean proxyEquals(@Nullable Object proxy, Object that) { if (!(that instanceof LazyLoadingProxy)) { return false; @@ -216,11 +217,11 @@ private boolean proxyEquals(@org.springframework.lang.Nullable Object proxy, Obj return proxyToString(proxy).equals(that.toString()); } - private int proxyHashCode(@org.springframework.lang.Nullable Object proxy) { + private int proxyHashCode(@Nullable Object proxy) { return proxyToString(proxy).hashCode(); } - @org.springframework.lang.Nullable + @Nullable private synchronized Object resolve() { if (resolved) { @@ -238,7 +239,7 @@ private synchronized Object resolve() { // property.getOwner() != null ? 
property.getOwner().getName() : "unknown", property.getName()); // } - return referenceReader.readReference(property, source, lookupFunction, resultConversionFunction); + return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader); } catch (RuntimeException ex) { throw ex; @@ -254,4 +255,5 @@ private synchronized Object resolve() { } } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 2ad4d75230..8a77b51e2f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -38,6 +38,7 @@ import org.bson.types.ObjectId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; + import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -63,6 +64,7 @@ import org.springframework.data.mapping.model.SpELExpressionParameterValueProvider; import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.DocumentPointer; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; @@ -114,7 +116,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App protected final QueryMapper idMapper; protected final DbRefResolver dbRefResolver; protected final DefaultDbRefProxyHandler dbRefProxyHandler; - protected final ReferenceReader referenceReader; + protected final 
ReferenceLookupDelegate referenceLookupDelegate; protected @Nullable ApplicationContext applicationContext; protected MongoTypeMapper typeMapper; @@ -123,7 +125,7 @@ public class MappingMongoConverter extends AbstractMongoConverter implements App private SpELContext spELContext; private @Nullable EntityCallbacks entityCallbacks; - private DocumentPointerFactory documentPointerFactory; + private final DocumentPointerFactory documentPointerFactory; /** * Creates a new {@link MappingMongoConverter} given the new {@link DbRefResolver} and {@link MappingContext}. @@ -154,7 +156,7 @@ public MappingMongoConverter(DbRefResolver dbRefResolver, return MappingMongoConverter.this.getValueInternal(context, prop, bson, evaluator); }); - this.referenceReader = new ReferenceReader(mappingContext, () -> spELContext); + this.referenceLookupDelegate = new ReferenceLookupDelegate(mappingContext, spELContext); this.documentPointerFactory = new DocumentPointerFactory(conversionService, mappingContext); } @@ -361,16 +363,15 @@ private ParameterValueProvider getParameterProvider(Con parameterProvider); } - private S read(ConversionContext context, MongoPersistentEntity entity, Document bson) { + private S read(ConversionContext context, MongoPersistentEntity entity, Document bson) { SpELExpressionEvaluator evaluator = new DefaultSpELExpressionEvaluator(bson, spELContext); DocumentAccessor documentAccessor = new DocumentAccessor(bson); - if (bson.get("_id") != null) { - - Object existing = context.getPath().getPathItem(bson.get("_id"), entity.getCollection(), entity.getType()); + if (hasIdentifier(bson)) { + S existing = findContextualEntity(context, entity, bson); if (existing != null) { - return (S) existing; + return existing; } } @@ -391,6 +392,16 @@ private S read(ConversionContext context, MongoPersistentEnti return instance; } + private boolean hasIdentifier(Document bson) { + return bson.get(BasicMongoPersistentProperty.ID_FIELD_NAME) != null; + } + + @Nullable + private S 
findContextualEntity(ConversionContext context, MongoPersistentEntity entity, Document bson) { + return context.getPath().getPathItem(bson.get(BasicMongoPersistentProperty.ID_FIELD_NAME), entity.getCollection(), + entity.getType()); + } + private S populateProperties(ConversionContext context, MongoPersistentEntity entity, DocumentAccessor documentAccessor, SpELExpressionEvaluator evaluator, S instance) { @@ -509,7 +520,7 @@ private void readAssociation(Association association, P ConversionContext context, SpELExpressionEvaluator evaluator) { MongoPersistentProperty property = association.getInverse(); - final Object value = documentAccessor.get(property); + Object value = documentAccessor.get(property); if (value == null) { return; @@ -521,18 +532,13 @@ private void readAssociation(Association association, P if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { - DocumentPointer pointer = new DocumentPointer() { - @Override - public Object getPointer() { - return value; - } - }; + DocumentPointer pointer = () -> value; // collection like special treatment accessor.setProperty(property, conversionService.convert(pointer, property.getActualType())); } else { accessor.setProperty(property, - dbRefResolver.resolveReference(property, value, referenceReader, context::convert)); + dbRefResolver.resolveReference(property, value, referenceLookupDelegate, context::convert)); } return; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java similarity index 85% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java index 66b698077b..2483f57543 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java @@ -29,13 +29,13 @@ /** * @author Christoph Strobl */ -public class DefaultReferenceLoader implements ReferenceLoader { +public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader { - private static final Logger LOGGER = LoggerFactory.getLogger(DefaultReferenceLoader.class); + private static final Logger LOGGER = LoggerFactory.getLogger(MongoDatabaseFactoryReferenceLoader.class); private final MongoDatabaseFactory mongoDbFactory; - public DefaultReferenceLoader(MongoDatabaseFactory mongoDbFactory) { + public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) { Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); @@ -43,7 +43,7 @@ public DefaultReferenceLoader(MongoDatabaseFactory mongoDbFactory) { } @Override - public Iterable bulkFetch(DocumentReferenceQuery filter, ReferenceCollection context) { + public Iterable fetchMany(DocumentReferenceQuery filter, ReferenceCollection context) { MongoCollection collection = getCollection(context); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java index 8b6c969439..41d7ab3c12 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java @@ -16,13 +16,10 @@ package org.springframework.data.mongodb.core.convert; import java.util.List; -import java.util.function.BiFunction; -import java.util.stream.Stream; import org.bson.Document; -import 
org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; + import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; import com.mongodb.DBRef; @@ -76,9 +73,8 @@ private T handle() throws UnsupportedOperationException { @Nullable @Override - public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, - LookupFunction lookupFunction, - ResultConversionFunction resultConversionFunction) { + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { return null; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java index d5c72afad8..7cfd5e3153 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -31,13 +31,13 @@ public interface ReferenceLoader { @Nullable - default Document fetch(DocumentReferenceQuery filter, ReferenceCollection context) { + default Document fetchOne(DocumentReferenceQuery filter, ReferenceCollection context) { - Iterator it = bulkFetch(filter, context).iterator(); + Iterator it = fetchMany(filter, context).iterator(); return it.hasNext() ? 
it.next() : null; } - Iterable bulkFetch(DocumentReferenceQuery filter, ReferenceCollection context); + Iterable fetchMany(DocumentReferenceQuery filter, ReferenceCollection context); interface DocumentReferenceQuery { @@ -52,16 +52,12 @@ default Bson getSort() { default Iterable apply(MongoCollection collection) { return restoreOrder(collection.find(getFilter()).sort(getSort())); } - + default Iterable restoreOrder(Iterable documents) { return documents; } - static DocumentReferenceQuery referenceFilter(Bson bson) { - return () -> bson; - } - - static DocumentReferenceQuery singleReferenceFilter(Bson bson) { + static DocumentReferenceQuery forSingleDocument(Bson bson) { return new DocumentReferenceQuery() { @@ -78,6 +74,22 @@ public Iterable apply(MongoCollection collection) { } }; } + + static DocumentReferenceQuery forManyDocuments(Bson bson) { + + return new DocumentReferenceQuery() { + + @Override + public Bson getFilter() { + return bson; + } + + @Override + public Iterable apply(MongoCollection collection) { + return collection.find(getFilter()).sort(getSort()); + } + }; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java similarity index 84% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index fb37367b1d..3c441c1388 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceReader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -29,12 +29,12 @@ import org.bson.Document; import org.bson.conversions.Bson; + import org.springframework.data.mapping.context.MappingContext; import 
org.springframework.data.mapping.model.SpELContext; import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.LookupFunction; +import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader; import org.springframework.data.mongodb.core.convert.ReferenceResolver.ReferenceCollection; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.ResultConversionFunction; import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; @@ -42,58 +42,59 @@ import org.springframework.data.mongodb.util.json.ParameterBindingContext; import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; import org.springframework.data.mongodb.util.json.ValueProvider; -import org.springframework.data.util.Lazy; import org.springframework.data.util.Streamable; import org.springframework.expression.EvaluationContext; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; import org.springframework.util.StringUtils; import com.mongodb.DBRef; import com.mongodb.client.MongoCollection; /** + * A common delegate for {@link ReferenceResolver} implementations to resolve a reference to one/many target documents + * that are converted to entities. 
+ * * @author Christoph Strobl + * @author Mark Paluch */ -public class ReferenceReader { +public final class ReferenceLookupDelegate { - private final Lazy, MongoPersistentProperty>> mappingContext; - private final Supplier spelContextSupplier; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final SpELContext spELContext; private final ParameterBindingDocumentCodec codec; - public ReferenceReader(MappingContext, MongoPersistentProperty> mappingContext, - Supplier spelContextSupplier) { - - this(() -> mappingContext, spelContextSupplier); - } + public ReferenceLookupDelegate( + MappingContext, MongoPersistentProperty> mappingContext, + SpELContext spELContext) { - public ReferenceReader( - Supplier, MongoPersistentProperty>> mappingContextSupplier, - Supplier spelContextSupplier) { + Assert.notNull(mappingContext, "MappingContext must not be null"); + Assert.notNull(spELContext, "SpELContext must not be null"); - this.mappingContext = Lazy.of(mappingContextSupplier); - this.spelContextSupplier = spelContextSupplier; + this.mappingContext = mappingContext; + this.spELContext = spELContext; this.codec = new ParameterBindingDocumentCodec(); } + @Nullable Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction, - ResultConversionFunction resultConversionFunction) { + MongoEntityReader entityReader) { - SpELContext spELContext = spelContextSupplier.get(); DocumentReferenceQuery filter = computeFilter(property, value, spELContext); ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext); Iterable result = lookupFunction.apply(filter, referenceCollection); - if (!result.iterator().hasNext()) { - return null; + if (property.isCollectionLike()) { + return entityReader.read(result, property.getTypeInformation()); } - if (property.isCollectionLike()) { - return resultConversionFunction.apply(result, property.getTypeInformation()); + if 
(!result.iterator().hasNext()) { + return null; } - return resultConversionFunction.apply(result.iterator().next(), property.getTypeInformation()); + return entityReader.read(result.iterator().next(), property.getTypeInformation()); } private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value, @@ -107,6 +108,8 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop return ReferenceCollection.fromDBRef((DBRef) value); } + String collection = mappingContext.getRequiredPersistentEntity(property.getAssociationTargetType()).getCollection(); + if (value instanceof Document) { Document ref = (Document) value; @@ -120,12 +123,12 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop () -> ref.get("db", String.class)); String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> ref.get("collection", - mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection())); + collection)); return new ReferenceCollection(targetDatabase, targetCollection); } return new ReferenceCollection(ref.getString("db"), ref.get("collection", - mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection())); + collection)); } if (property.isDocumentReference()) { @@ -135,16 +138,16 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null); String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, - () -> mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection()); + () -> collection); return new ReferenceCollection(targetDatabase, targetCollection); } return new ReferenceCollection(null, - mappingContext.get().getPersistentEntity(property.getAssociationTargetType()).getCollection()); + collection); } - @Nullable + 
@SuppressWarnings("unchecked") private T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier defaultValue) { if (!StringUtils.hasText(value)) { @@ -153,7 +156,7 @@ private T parseValueOrGet(String value, ParameterBindingContext bindingConte if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) { String s = "{ 'target-value' : " + value + "}"; - T evaluated = (T) codec.decode(s, bindingContext).get("target-value "); + T evaluated = (T) codec.decode(s, bindingContext).get("target-value"); return evaluated != null ? evaluated : defaultValue.get(); } @@ -186,6 +189,7 @@ EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object return ctx; } + @SuppressWarnings("unchecked") DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) { DocumentReference documentReference = property.getDocumentReference(); @@ -196,7 +200,7 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object va if (property.isCollectionLike() && value instanceof Collection) { List ors = new ArrayList<>(); - for (Object entry : (Collection) value) { + for (Object entry : (Collection) value) { Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); ors.add(decoded); @@ -209,7 +213,7 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object va Map filterMap = new LinkedHashMap<>(); - for (Entry entry : ((Map) value).entrySet()) { + for (Entry entry : ((Map) value).entrySet()) { Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext)); filterMap.put(entry.getKey(), decoded); @@ -321,9 +325,9 @@ public Document getSort() { int compareAgainstReferenceIndex(List referenceList, Document document1, Document document2) { - for (int i = 0; i < referenceList.size(); i++) { + for (Document document : referenceList) { - Set> entries = referenceList.get(i).entrySet(); + Set> entries = 
document.entrySet(); if (document1.entrySet().containsAll(entries)) { return -1; } @@ -334,4 +338,10 @@ int compareAgainstReferenceIndex(List referenceList, Document document return referenceList.size(); } } + + @FunctionalInterface + interface LookupFunction { + + Iterable apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java index f29dc16a7c..dae2043b4b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -15,13 +15,10 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.Collections; - -import org.bson.Document; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; +import org.springframework.util.Assert; import com.mongodb.DBRef; @@ -31,22 +28,8 @@ public interface ReferenceResolver { @Nullable - Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, - LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction); - - default Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, - ResultConversionFunction resultConversionFunction) { - - return resolveReference(property, source, referenceReader, (filter, ctx) -> { - if (property.isCollectionLike() || property.isMap()) { - return getReferenceLoader().bulkFetch(filter, ctx); - - } - - Object target = 
getReferenceLoader().fetch(filter, ctx); - return target == null ? Collections.emptyList() : Collections.singleton(getReferenceLoader().fetch(filter, ctx)); - }, resultConversionFunction); - } + Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader); ReferenceLoader getReferenceLoader(); @@ -58,6 +41,8 @@ class ReferenceCollection { public ReferenceCollection(@Nullable String database, String collection) { + Assert.hasText(collection, "Collection must not be empty or null"); + this.database = database; this.collection = collection; } @@ -76,13 +61,9 @@ public String getDatabase() { } } - @FunctionalInterface - interface LookupFunction { - Iterable apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection); - } @FunctionalInterface - interface ResultConversionFunction { - Object apply(Object source, TypeInformation property); + interface MongoEntityReader { + Object read(Object source, TypeInformation property); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index b7b71a7fee..53af00fc54 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -47,7 +47,7 @@ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentPrope private static final Logger LOG = LoggerFactory.getLogger(BasicMongoPersistentProperty.class); - private static final String ID_FIELD_NAME = "_id"; + public static final String ID_FIELD_NAME = "_id"; private static final String LANGUAGE_FIELD_NAME = "language"; private static final Set> SUPPORTED_ID_TYPES = new HashSet>(); 
private static final Set SUPPORTED_ID_PROPERTY_NAMES = new HashSet(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java index f5d43c8ef0..91afb8c6ec 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java @@ -54,7 +54,8 @@ public static void assertProxyIsResolved(Object target, boolean expected) { public static void assertProxy(Object proxy, Consumer verification) { - LazyLoadingProxyGenerator.LazyLoadingInterceptor interceptor = (LazyLoadingProxyGenerator.LazyLoadingInterceptor) (proxy instanceof Advised ? ((Advised) proxy).getAdvisors()[0].getAdvice() + LazyLoadingProxyFactory.LazyLoadingInterceptor interceptor = (LazyLoadingProxyFactory.LazyLoadingInterceptor) (proxy instanceof Advised + ? 
((Advised) proxy).getAdvisors()[0].getAdvice() : ((Factory) proxy).getCallback(0)); verification.accept(new LazyLoadingProxyValueRetriever(interceptor)); @@ -67,9 +68,9 @@ private static LazyLoadingInterceptor extractInterceptor(Object proxy) { public static class LazyLoadingProxyValueRetriever { - LazyLoadingProxyGenerator.LazyLoadingInterceptor interceptor; + LazyLoadingProxyFactory.LazyLoadingInterceptor interceptor; - public LazyLoadingProxyValueRetriever(LazyLoadingProxyGenerator.LazyLoadingInterceptor interceptor) { + public LazyLoadingProxyValueRetriever(LazyLoadingProxyFactory.LazyLoadingInterceptor interceptor) { this.interceptor = interceptor; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 9c157db759..d371b32c12 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1501,18 +1501,19 @@ static class WithDocumentReferences { SimpeEntityWithoutId noIdButLookupQuery; } - + + // TODO @Test void xxx() { - + Sample sample = new Sample(); sample.foo = "sample-id"; Query query = query(where("sample").is(sample)); - + org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(WithDocumentReferences.class)); - + System.out.println("mappedObject.toJson(): " + mappedObject.toJson()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java index 28d5123502..9c898d28ce 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntityUnitTests.java @@ -30,11 +30,10 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.context.ApplicationContext; import org.springframework.core.annotation.AliasFor; -import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mapping.MappingException; -import org.springframework.data.mongodb.core.index.Indexed; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.spel.ExtensionAwareEvaluationContextProvider; import org.springframework.data.spel.spi.EvaluationContextExtension; @@ -351,6 +350,9 @@ private static class DocumentWithComposedAnnotation {} @Document("#{myProperty}") class MappedWithExtension {} + @Document("${value.from.file}") + class MappedWithValue {} + @Document(collation = "#{myCollation}") class WithCollationFromSpEL {} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java index b70930dae0..8a462a9370 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java @@ -18,20 +18,13 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import org.springframework.data.mongodb.core.convert.ReferenceLoader; -import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; -import 
org.springframework.data.mongodb.core.convert.ReferenceReader; -import org.springframework.data.util.TypeInformation; -import org.springframework.lang.Nullable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import java.text.DecimalFormat; import java.util.*; -import java.util.function.BiFunction; import java.util.regex.Pattern; import java.util.stream.Collectors; -import java.util.stream.Stream; import org.bson.Document; import org.bson.types.ObjectId; @@ -48,12 +41,15 @@ import org.springframework.data.mongodb.core.convert.DbRefResolverCallback; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.ReferenceLoader; +import org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.ReactiveMongoRepository; import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StopWatch; import org.springframework.util.StringUtils; @@ -106,7 +102,8 @@ public void setUp() throws Exception { @Nullable @Override - public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceReader referenceReader, LookupFunction lookupFunction, ResultConversionFunction resultConversionFunction) { + public Object resolveReference(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { return null; } From 
e96ef8e18f4e42c9afae34b7b503e0d95c43d169 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 19 May 2021 11:30:05 +0200 Subject: [PATCH 019/983] Avoid capturing lambdas, update javadoc and add tests. Also allow direct usage of (at)Reference from data commons to define associations. Original pull request: #3647. Closes #3602. --- .../convert/DefaultReferenceResolver.java | 69 +++--- .../core/convert/DocumentPointerFactory.java | 201 +++++++++++++----- .../core/convert/MappingMongoConverter.java | 45 ++-- .../MongoDatabaseFactoryReferenceLoader.java | 19 +- .../mongodb/core/convert/MongoWriter.java | 16 +- .../core/convert/NoOpDbRefResolver.java | 5 - .../mongodb/core/convert/QueryMapper.java | 5 +- .../mongodb/core/convert/ReferenceLoader.java | 62 ++++-- .../core/convert/ReferenceLookupDelegate.java | 182 ++++++++++++---- .../core/convert/ReferenceResolver.java | 60 +++++- .../data/mongodb/util/BsonUtils.java | 8 +- .../MongoTemplateDocumentReferenceTests.java | 105 ++++++++- .../DocumentPointerFactoryUnitTests.java | 139 ++++++++++++ .../MappingMongoConverterUnitTests.java | 10 +- .../performance/ReactivePerformanceTests.java | 4 - src/main/asciidoc/reference/mapping.adoc | 6 +- 16 files changed, 747 insertions(+), 189 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java index 7e38b6995d..a678fd7da6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -19,40 +19,45 @@ import java.util.Collections; +import 
org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.lang.Nullable; +import org.springframework.util.Assert; /** + * {@link ReferenceResolver} implementation that uses a given {@link ReferenceLookupDelegate} to load and convert entity + * associations expressed via a {@link MongoPersistentProperty persitent property}. Creates {@link LazyLoadingProxy + * proxies} for associations that should be lazily loaded. + * * @author Christoph Strobl */ public class DefaultReferenceResolver implements ReferenceResolver { private final ReferenceLoader referenceLoader; + private final LookupFunction collectionLookupFunction = (filter, ctx) -> getReferenceLoader().fetchMany(filter, ctx); + private final LookupFunction singleValueLookupFunction = (filter, ctx) -> { + Object target = getReferenceLoader().fetchOne(filter, ctx); + return target == null ? Collections.emptyList() : Collections.singleton(getReferenceLoader().fetchOne(filter, ctx)); + }; + + /** + * Create a new instance of {@link DefaultReferenceResolver}. + * + * @param referenceLoader must not be {@literal null}. + */ public DefaultReferenceResolver(ReferenceLoader referenceLoader) { + + Assert.notNull(referenceLoader, "ReferenceLoader must not be null!"); this.referenceLoader = referenceLoader; } - @Override - public ReferenceLoader getReferenceLoader() { - return referenceLoader; - } - - @Nullable @Override public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { - LookupFunction lookupFunction = (filter, ctx) -> { - if (property.isCollectionLike() || property.isMap()) { - return getReferenceLoader().fetchMany(filter, ctx); - - } - - Object target = getReferenceLoader().fetchOne(filter, ctx); - return target == null ? 
Collections.emptyList() - : Collections.singleton(getReferenceLoader().fetchOne(filter, ctx)); - }; + LookupFunction lookupFunction = (property.isCollectionLike() || property.isMap()) ? collectionLookupFunction + : singleValueLookupFunction; if (isLazyReference(property)) { return createLazyLoadingProxy(property, source, referenceLookupDelegate, lookupFunction, entityReader); @@ -61,13 +66,14 @@ public Object resolveReference(MongoPersistentProperty property, Object source, return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader); } - private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, - ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, - MongoEntityReader entityReader) { - return new LazyLoadingProxyFactory(referenceLookupDelegate).createLazyLoadingProxy(property, source, lookupFunction, - entityReader); - } - + /** + * Check if the association expressed by the given {@link MongoPersistentProperty property} should be resolved lazily. + * + * @param property + * @return return {@literal true} if the defined association is lazy. + * @see DBRef#lazy() + * @see DocumentReference#lazy() + */ protected boolean isLazyReference(MongoPersistentProperty property) { if (property.isDocumentReference()) { @@ -76,4 +82,19 @@ protected boolean isLazyReference(MongoPersistentProperty property) { return property.getDBRef() != null && property.getDBRef().lazy(); } + + /** + * The {@link ReferenceLoader} executing the lookup. + * + * @return never {@literal null}. 
+ */ + protected ReferenceLoader getReferenceLoader() { + return referenceLoader; + } + + private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, + ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) { + return new LazyLoadingProxyFactory(referenceLookupDelegate).createLazyLoadingProxy(property, source, lookupFunction, + entityReader); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java index 8e9554b6ba..09d69e4b27 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java @@ -15,18 +15,20 @@ */ package org.springframework.data.mongodb.core.convert; -import java.util.HashMap; import java.util.LinkedHashMap; -import java.util.Locale; import java.util.Map; import java.util.Map.Entry; +import java.util.WeakHashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.bson.Document; - import org.springframework.core.convert.ConversionService; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.annotation.Reference; import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.BeanWrapperPropertyAccessorFactory; import org.springframework.data.mongodb.core.mapping.DocumentPointer; @@ -34,6 +36,10 @@ import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; /** + * Internal API to construct {@link 
DocumentPointer} for a given property. Considers {@link LazyLoadingProxy}, + * registered {@link Object} to {@link DocumentPointer} {@link org.springframework.core.convert.converter.Converter}, + * simple {@literal _id} lookups and cases where the {@link DocumentPointer} needs to be computed via a lookup query. + * * @author Christoph Strobl * @since 3.3 */ @@ -41,17 +47,29 @@ class DocumentPointerFactory { private final ConversionService conversionService; private final MappingContext, MongoPersistentProperty> mappingContext; - private final Map linkageMap; - - public DocumentPointerFactory(ConversionService conversionService, + private final Map cache; + + /** + * A {@link Pattern} matching quoted and unquoted variants (with/out whitespaces) of + * {'_id' : ?#{#target} }. + */ + private static final Pattern DEFAULT_LOOKUP_PATTERN = Pattern.compile("\\{\\s?" + // document start (whitespace opt) + "['\"]?_id['\"]?" + // followed by an optionally quoted _id. Like: _id, '_id' or "_id" + "?\\s?:\\s?" + // then a colon optionally wrapped inside whitespaces + "['\"]?\\?#\\{#target\\}['\"]?" 
+ // leading to the potentially quoted ?#{#target} expression + "\\s*}"); // some optional whitespaces and document close + + DocumentPointerFactory(ConversionService conversionService, MappingContext, MongoPersistentProperty> mappingContext) { this.conversionService = conversionService; this.mappingContext = mappingContext; - this.linkageMap = new HashMap<>(); + this.cache = new WeakHashMap<>(); } - public DocumentPointer computePointer(MongoPersistentProperty property, Object value, Class typeHint) { + DocumentPointer computePointer( + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentProperty property, Object value, Class typeHint) { if (value instanceof LazyLoadingProxy) { return () -> ((LazyLoadingProxy) value).getSource(); @@ -59,92 +77,161 @@ public DocumentPointer computePointer(MongoPersistentProperty property, Objec if (conversionService.canConvert(typeHint, DocumentPointer.class)) { return conversionService.convert(value, DocumentPointer.class); - } else { + } - MongoPersistentEntity persistentEntity = mappingContext - .getRequiredPersistentEntity(property.getAssociationTargetType()); + MongoPersistentEntity persistentEntity = mappingContext + .getRequiredPersistentEntity(property.getAssociationTargetType()); - // TODO: Extract method - if (!property.getDocumentReference().lookup().toLowerCase(Locale.ROOT).replaceAll("\\s", "").replaceAll("'", "") - .equals("{_id:?#{#target}}")) { + if (usesDefaultLookup(property)) { + return () -> persistentEntity.getIdentifierAccessor(value).getIdentifier(); + } - MongoPersistentEntity valueEntity = mappingContext.getPersistentEntity(value.getClass()); - PersistentPropertyAccessor propertyAccessor; - if (valueEntity == null) { - propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), - value); - } else { - propertyAccessor = valueEntity.getPropertyAccessor(value); + MongoPersistentEntity valueEntity = 
mappingContext.getPersistentEntity(value.getClass()); + PersistentPropertyAccessor propertyAccessor; + if (valueEntity == null) { + propertyAccessor = BeanWrapperPropertyAccessorFactory.INSTANCE.getPropertyAccessor(property.getOwner(), value); + } else { + propertyAccessor = valueEntity.getPropertyPathAccessor(value); + } - } + return cache.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::from) + .getDocumentPointer(mappingContext, persistentEntity, propertyAccessor); + } - return () -> linkageMap.computeIfAbsent(property.getDocumentReference().lookup(), LinkageDocument::new) - .get(persistentEntity, propertyAccessor); - } + private boolean usesDefaultLookup(MongoPersistentProperty property) { - // just take the id as a reference - return () -> persistentEntity.getIdentifierAccessor(value).getIdentifier(); + if (property.isDocumentReference()) { + return DEFAULT_LOOKUP_PATTERN.matcher(property.getDocumentReference().lookup()).matches(); + } + + Reference atReference = property.findAnnotation(Reference.class); + if (atReference != null) { + return true; } + + throw new IllegalStateException(String.format("%s does not seem to be define Reference", property)); } + /** + * Value object that computes a document pointer from a given lookup query by identifying SpEL expressions and + * inverting it. + * + *
        +	 * // source
        +	 * { 'firstname' : ?#{fn}, 'lastname' : '?#{ln} }
        +	 * 
        +	 * // target
        +	 * { 'fn' : ..., 'ln' : ... }
        +	 * 
        + * + * The actual pointer is the computed via + * {@link #getDocumentPointer(MappingContext, MongoPersistentEntity, PersistentPropertyAccessor)} applying values from + * the provided {@link PersistentPropertyAccessor} to the target document by looking at the keys of the expressions + * from the source. + */ static class LinkageDocument { - static final Pattern pattern = Pattern.compile("\\?#\\{#?[\\w\\d]*\\}"); + static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\?#\\{#?(?[\\w\\d\\.\\-)]*)\\}"); + static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("###_(?\\d*)_###"); - String lookup; - org.bson.Document fetchDocument; - Map mapMap; + private final String lookup; + private final org.bson.Document documentPointer; + private final Map placeholderMap; - public LinkageDocument(String lookup) { + static LinkageDocument from(String lookup) { + return new LinkageDocument(lookup); + } - this.lookup = lookup; - String targetLookup = lookup; + private LinkageDocument(String lookup) { + this.lookup = lookup; + this.placeholderMap = new LinkedHashMap<>(); - Matcher matcher = pattern.matcher(lookup); int index = 0; - mapMap = new LinkedHashMap<>(); + Matcher matcher = EXPRESSION_PATTERN.matcher(lookup); + String targetLookup = lookup; - // TODO: Make explicit what's happening here while (matcher.find()) { - String expr = matcher.group(); - String sanitized = expr.substring(0, expr.length() - 1).replace("?#{#", "").replace("?#{", "") - .replace("target.", "").replaceAll("'", ""); - mapMap.put(index, sanitized); - targetLookup = targetLookup.replace(expr, index + ""); + String expression = matcher.group(); + String fieldName = matcher.group("fieldName").replace("target.", ""); + + String placeholder = placeholder(index); + placeholderMap.put(placeholder, fieldName); + targetLookup = targetLookup.replace(expression, "'" + placeholder + "'"); index++; } - fetchDocument = org.bson.Document.parse(targetLookup); + this.documentPointer = 
org.bson.Document.parse(targetLookup); } - org.bson.Document get(MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { + private String placeholder(int index) { + return "###_" + index + "_###"; + } - org.bson.Document targetDocument = new Document(); + private boolean isPlaceholder(String key) { + return PLACEHOLDER_PATTERN.matcher(key).matches(); + } - // TODO: recursive matching over nested Documents or would the parameter binding json parser be a thing? - // like we have it ordered by index values and could provide the parameter array from it. + DocumentPointer getDocumentPointer( + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { + return () -> updatePlaceholders(documentPointer, new Document(), mappingContext, persistentEntity, + propertyAccessor); + } + + Document updatePlaceholders(org.bson.Document source, org.bson.Document target, + MappingContext, MongoPersistentProperty> mappingContext, + MongoPersistentEntity persistentEntity, PersistentPropertyAccessor propertyAccessor) { - for (Entry entry : fetchDocument.entrySet()) { + for (Entry entry : source.entrySet()) { + + if (entry.getKey().startsWith("$")) { + throw new InvalidDataAccessApiUsageException(String.format( + "Cannot derive document pointer from lookup '%s' using query operator (%s). 
Please consider registering a custom converter.", + lookup, entry.getKey())); + } - if (entry.getKey().equals("target")) { + if (entry.getValue() instanceof Document) { - String refKey = mapMap.get(entry.getValue()); + MongoPersistentProperty persistentProperty = persistentEntity.getPersistentProperty(entry.getKey()); + if (persistentProperty != null && persistentProperty.isEntity()) { - if (persistentEntity.hasIdProperty()) { - targetDocument.put(refKey, propertyAccessor.getProperty(persistentEntity.getIdProperty())); + MongoPersistentEntity nestedEntity = mappingContext.getPersistentEntity(persistentProperty.getType()); + target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext, + nestedEntity, nestedEntity.getPropertyAccessor(propertyAccessor.getProperty(persistentProperty)))); } else { - targetDocument.put(refKey, propertyAccessor.getBean()); + target.put(entry.getKey(), updatePlaceholders((Document) entry.getValue(), new Document(), mappingContext, + persistentEntity, propertyAccessor)); } continue; } - Object target = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(entry.getKey())); - String refKey = mapMap.get(entry.getValue()); - targetDocument.put(refKey, target); + if (placeholderMap.containsKey(entry.getValue())) { + + String attribute = placeholderMap.get(entry.getValue()); + if (attribute.contains(".")) { + attribute = attribute.substring(attribute.lastIndexOf('.') + 1); + } + + String fieldName = entry.getKey().equals("_id") ? 
"id" : entry.getKey(); + if (!fieldName.contains(".")) { + + Object targetValue = propertyAccessor.getProperty(persistentEntity.getPersistentProperty(fieldName)); + target.put(attribute, targetValue); + continue; + } + + PersistentPropertyPath path = mappingContext + .getPersistentPropertyPath(PropertyPath.from(fieldName, persistentEntity.getTypeInformation())); + Object targetValue = propertyAccessor.getProperty(path); + target.put(attribute, targetValue); + continue; + } + + target.put(entry.getKey(), entry.getValue()); } - return targetDocument; + return target; } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 8a77b51e2f..87f0adeb62 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -45,6 +45,7 @@ import org.springframework.core.CollectionFactory; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.support.DefaultConversionService; +import org.springframework.data.annotation.Reference; import org.springframework.data.convert.TypeMapper; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.MappingException; @@ -526,7 +527,7 @@ private void readAssociation(Association association, P return; } - if (property.isDocumentReference()) { + if (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) { // quite unusual but sounds like worth having? 
@@ -587,43 +588,46 @@ public DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referringP return createDBRef(object, referringProperty); } - public Object toDocumentReference(Object source, @Nullable MongoPersistentProperty referringProperty) { + @Override + public DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { if (source instanceof LazyLoadingProxy) { - return ((LazyLoadingProxy) source).getSource(); + return () -> ((LazyLoadingProxy) source).getSource(); } - if (referringProperty != null) { + Assert.notNull(referringProperty, "Cannot create DocumentReference. The referringProperty must not be null!"); if (referringProperty.isDbReference()) { - return toDBRef(source, referringProperty); + return () -> toDBRef(source, referringProperty); } - if (referringProperty.isDocumentReference()) { + + if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) { return createDocumentPointer(source, referringProperty); } - } - throw new RuntimeException("oops - what's that " + source); + throw new IllegalArgumentException("The referringProperty is neither a DBRef nor a document reference"); } - Object createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { + DocumentPointer createDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { if (referringProperty == null) { - return source; + return () -> source; + } + + if(source instanceof DocumentPointer) { + return (DocumentPointer) source; } if (ClassUtils.isAssignableValue(referringProperty.getType(), source) && conversionService.canConvert(referringProperty.getType(), DocumentPointer.class)) { - return conversionService.convert(source, DocumentPointer.class).getPointer(); + return conversionService.convert(source, DocumentPointer.class); } if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) { - return 
documentPointerFactory.computePointer(referringProperty, source, referringProperty.getActualType()) - .getPointer(); - + return documentPointerFactory.computePointer(mappingContext, referringProperty, source, referringProperty.getActualType()); } - return source; + return () -> source; } /** @@ -813,7 +817,7 @@ protected void writePropertyInternal(@Nullable Object obj, DocumentAccessor acce if (prop.isAssociation()) { accessor.put(prop, new DocumentPointerFactory(conversionService, mappingContext) - .computePointer(prop, obj, valueType.getType()).getPointer()); + .computePointer(mappingContext, prop, obj, valueType.getType()).getPointer()); return; } @@ -864,13 +868,14 @@ protected List createCollection(Collection collection, MongoPersisten return mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(it) .getIdentifier(); } - }).collect(Collectors.toList()), ClassTypeInformation.from(DocumentPointer.class), new BasicDBList()); + }).collect(Collectors.toList()), ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>()); } if (property.hasExplicitWriteTarget()) { return writeCollectionInternal(collection, new FieldTypeInformation<>(property), new ArrayList<>()); } - return writeCollectionInternal(collection, property.getTypeInformation(), new BasicDBList()); + + return writeCollectionInternal(collection, property.getTypeInformation(), new ArrayList<>()); } List dbList = new ArrayList<>(collection.size()); @@ -960,7 +965,7 @@ private List writeCollectionInternal(Collection source, @Nullable Typ collection.add(getPotentiallyConvertedSimpleWrite(element, componentType != null ? 
componentType.getType() : Object.class)); } else if (element instanceof Collection || elementType.isArray()) { - collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new BasicDBList())); + collection.add(writeCollectionInternal(BsonUtils.asCollection(element), componentType, new ArrayList<>())); } else { Document document = new Document(); writeInternal(element, document, componentType); @@ -992,7 +997,7 @@ protected Bson writeMapInternal(Map obj, Bson bson, TypeInformat writeSimpleInternal(val, bson, simpleKey); } else if (val instanceof Collection || val.getClass().isArray()) { BsonUtils.addToMap(bson, simpleKey, - writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new BasicDBList())); + writeCollectionInternal(BsonUtils.asCollection(val), propertyType.getMapValueType(), new ArrayList<>())); } else { Document document = new Document(); TypeInformation valueTypeInfo = propertyType.isMap() ? propertyType.getMapValueType() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java index 2483f57543..0973e5a5fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java @@ -27,6 +27,9 @@ import com.mongodb.client.MongoCollection; /** + * {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents} + * for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}. 
+ * * @author Christoph Strobl */ public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader { @@ -35,6 +38,9 @@ public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader { private final MongoDatabaseFactory mongoDbFactory; + /** + * @param mongoDbFactory must not be {@literal null}. + */ public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) { Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); @@ -43,20 +49,27 @@ public MongoDatabaseFactoryReferenceLoader(MongoDatabaseFactory mongoDbFactory) } @Override - public Iterable fetchMany(DocumentReferenceQuery filter, ReferenceCollection context) { + public Iterable fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context) { MongoCollection collection = getCollection(context); if (LOGGER.isTraceEnabled()) { - LOGGER.trace("Bulk fetching {} from {}.{}.", filter, + LOGGER.trace("Bulk fetching {} from {}.{}.", referenceQuery, StringUtils.hasText(context.getDatabase()) ? context.getDatabase() : collection.getNamespace().getDatabaseName(), context.getCollection()); } - return filter.apply(collection); + return referenceQuery.apply(collection); } + /** + * Obtain the {@link MongoCollection} for a given {@link ReferenceCollection} from the underlying + * {@link MongoDatabaseFactory}. + * + * @param context must not be {@literal null}. + * @return the {@link MongoCollection} targeted by the {@link ReferenceCollection}. 
+ */ protected MongoCollection getCollection(ReferenceCollection context) { return MongoDatabaseUtils.getDatabase(context.getDatabase(), mongoDbFactory).getCollection(context.getCollection(), diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java index 779b3236d3..6bef57cb63 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoWriter.java @@ -17,6 +17,8 @@ import org.bson.conversions.Bson; import org.springframework.data.convert.EntityWriter; +import org.springframework.data.mongodb.core.mapping.DocumentPointer; +import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; @@ -61,6 +63,7 @@ default Object convertToMongoType(@Nullable Object obj) { default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity entity) { return convertToMongoType(obj, entity.getTypeInformation()); } + /** * Creates a {@link DBRef} to refer to the given object. * @@ -71,7 +74,16 @@ default Object convertToMongoType(@Nullable Object obj, MongoPersistentEntity */ DBRef toDBRef(Object object, @Nullable MongoPersistentProperty referingProperty); - default Object toDocumentReference(Object source, @Nullable MongoPersistentProperty referringProperty) { - return toDBRef(source, referringProperty); + /** + * Creates a the {@link DocumentPointer} representing the link to another entity. + * + * @param source the object to create a document link to. 
+ * @param referringProperty the client-side property referring to the object which might carry additional metadata for + * the {@link DBRef} object to create. Can be {@literal null}. + * @return will never be {@literal null}. + * @since 3.3 + */ + default DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersistentProperty referringProperty) { + return () -> toDBRef(source, referringProperty); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java index 41d7ab3c12..587d1a5047 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/NoOpDbRefResolver.java @@ -77,9 +77,4 @@ public Object resolveReference(MongoPersistentProperty property, Object source, ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader) { return null; } - - @Override - public ReferenceLoader getReferenceLoader() { - return handle(); - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 36353e4f86..81c1c96ddf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -26,6 +26,7 @@ import org.bson.types.ObjectId; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.converter.Converter; +import org.springframework.data.annotation.Reference; import org.springframework.data.domain.Example; import org.springframework.data.mapping.Association; import 
org.springframework.data.mapping.MappingException; @@ -672,8 +673,8 @@ private Object createReferenceFor(Object source, MongoPersistentProperty propert return (DBRef) source; } - if(property != null && property.isDocumentReference()) { - return converter.toDocumentReference(source, property); + if(property != null && (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) { + return converter.toDocumentPointer(source, property).getPointer(); } return converter.toDBRef(source, property); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java index 7cfd5e3153..2f96f57da2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -26,33 +26,70 @@ import com.mongodb.client.MongoCollection; /** + * The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a + * {@link ReferenceLoader.DocumentReferenceQuery}. + * * @author Christoph Strobl + * @since 3.3 */ public interface ReferenceLoader { + /** + * Obtain a single {@link Document} matching the given {@literal referenceQuery} in the {@literal context}. + * + * @param referenceQuery must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the matching {@link Document} or {@literal null} if none found. + */ @Nullable - default Document fetchOne(DocumentReferenceQuery filter, ReferenceCollection context) { + default Document fetchOne(DocumentReferenceQuery referenceQuery, ReferenceCollection context) { - Iterator it = fetchMany(filter, context).iterator(); + Iterator it = fetchMany(referenceQuery, context).iterator(); return it.hasNext() ? 
it.next() : null; } - Iterable fetchMany(DocumentReferenceQuery filter, ReferenceCollection context); - + /** + * Obtain multiple {@link Document} matching the given {@literal referenceQuery} in the {@literal context}. + * + * @param referenceQuery must not be {@literal null}. + * @param context must not be {@literal null}. + * @return the matching {@link Document} or {@literal null} if none found. + */ + Iterable fetchMany(DocumentReferenceQuery referenceQuery, ReferenceCollection context); + + /** + * The {@link DocumentReferenceQuery} defines the criteria by which {@link Document documents} should be matched + * applying potentially given order criteria. + */ interface DocumentReferenceQuery { - Bson getFilter(); - + /** + * Get the query to obtain matching {@link Document documents}. + * + * @return never {@literal null}. + */ + Bson getQuery(); + + /** + * Get the sort criteria for ordering results. + * + * @return an empty {@link Document} by default. Never {@literal null}. + */ default Bson getSort() { return new Document(); } // TODO: Move apply method into something else that holds the collection and knows about single item/multi-item - // processing default Iterable apply(MongoCollection collection) { - return restoreOrder(collection.find(getFilter()).sort(getSort())); + return restoreOrder(collection.find(getQuery()).sort(getSort())); } + /** + * Restore the order of fetched documents. + * + * @param documents must not be {@literal null}. + * @return never {@literal null}. 
+ */ default Iterable restoreOrder(Iterable documents) { return documents; } @@ -62,14 +99,14 @@ static DocumentReferenceQuery forSingleDocument(Bson bson) { return new DocumentReferenceQuery() { @Override - public Bson getFilter() { + public Bson getQuery() { return bson; } @Override public Iterable apply(MongoCollection collection) { - Document result = collection.find(getFilter()).sort(getSort()).limit(1).first(); + Document result = collection.find(getQuery()).sort(getSort()).limit(1).first(); return result != null ? Collections.singleton(result) : Collections.emptyList(); } }; @@ -80,16 +117,15 @@ static DocumentReferenceQuery forManyDocuments(Bson bson) { return new DocumentReferenceQuery() { @Override - public Bson getFilter() { + public Bson getQuery() { return bson; } @Override public Iterable apply(MongoCollection collection) { - return collection.find(getFilter()).sort(getSort()); + return collection.find(getQuery()).sort(getSort()); } }; } } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index 3c441c1388..09f4c1a8ae 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; +import java.lang.annotation.Annotation; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -29,7 +30,6 @@ import org.bson.Document; import org.bson.conversions.Bson; - import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.SpELContext; import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; @@ -64,6 +64,12 @@ public 
final class ReferenceLookupDelegate { private final SpELContext spELContext; private final ParameterBindingDocumentCodec codec; + /** + * Create a new {@link ReferenceLookupDelegate}. + * + * @param mappingContext must not be {@literal null}. + * @param spELContext must not be {@literal null}. + */ public ReferenceLookupDelegate( MappingContext, MongoPersistentProperty> mappingContext, SpELContext spELContext) { @@ -76,11 +82,20 @@ public ReferenceLookupDelegate( this.codec = new ParameterBindingDocumentCodec(); } + /** + * Read the reference expressed by the given property. + * + * @param property the reference defining property. Must not be {@literal null}. THe + * @param value the source value identifying to the referenced entity. Must not be {@literal null}. + * @param lookupFunction to execute a lookup query. Must not be {@literal null}. + * @param entityReader the callback to convert raw source values into actual domain types. Must not be + * {@literal null}. + * @return can be {@literal null}. 
+ */ @Nullable - Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction, + public Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction, MongoEntityReader entityReader) { - DocumentReferenceQuery filter = computeFilter(property, value, spELContext); ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext); @@ -100,10 +115,12 @@ Object readReference(MongoPersistentProperty property, Object value, LookupFunct private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value, SpELContext spELContext) { + // Use the first value as a reference for others in case of collection like if (value instanceof Iterable) { value = ((Iterable) value).iterator().next(); } + // handle DBRef value if (value instanceof DBRef) { return ReferenceCollection.fromDBRef((DBRef) value); } @@ -112,7 +129,7 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop if (value instanceof Document) { - Document ref = (Document) value; + Document documentPointer = (Document) value; if (property.isDocumentReference()) { @@ -120,15 +137,13 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop DocumentReference documentReference = property.getDocumentReference(); String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, - () -> ref.get("db", String.class)); + () -> documentPointer.get("db", String.class)); String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, - () -> ref.get("collection", - collection)); + () -> documentPointer.get("collection", collection)); return new ReferenceCollection(targetDatabase, targetCollection); } - return new ReferenceCollection(ref.getString("db"), ref.get("collection", - collection)); + return new ReferenceCollection(documentPointer.getString("db"), documentPointer.get("collection", collection)); } 
if (property.isDocumentReference()) { @@ -137,16 +152,24 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop DocumentReference documentReference = property.getDocumentReference(); String targetDatabase = parseValueOrGet(documentReference.db(), bindingContext, () -> null); - String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, - () -> collection); + String targetCollection = parseValueOrGet(documentReference.collection(), bindingContext, () -> collection); return new ReferenceCollection(targetDatabase, targetCollection); } - return new ReferenceCollection(null, - collection); + return new ReferenceCollection(null, collection); } + /** + * Use the given {@link ParameterBindingContext} to compute potential expressions against the value. + * + * @param value must not be {@literal null}. + * @param bindingContext must not be {@literal null}. + * @param defaultValue + * @param + * @return can be {@literal null}. + */ + @Nullable @SuppressWarnings("unchecked") private T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier defaultValue) { @@ -154,12 +177,17 @@ private T parseValueOrGet(String value, ParameterBindingContext bindingConte return defaultValue.get(); } + // parameter binding requires a document, since we do not have one, construct it. if (!BsonUtils.isJsonDocument(value) && value.contains("?#{")) { String s = "{ 'target-value' : " + value + "}"; T evaluated = (T) codec.decode(s, bindingContext).get("target-value"); return evaluated != null ? evaluated : defaultValue.get(); } + if (BsonUtils.isJsonDocument(value)) { + return (T) codec.decode(value, bindingContext); + } + T evaluated = (T) bindingContext.evaluateExpression(value); return evaluated != null ? 
evaluated : defaultValue.get(); } @@ -171,8 +199,8 @@ ParameterBindingContext bindingContext(MongoPersistentProperty property, Object } ValueProvider valueProviderFor(Object source) { - return (index) -> { + return (index) -> { if (source instanceof Document) { return Streamable.of(((Document) source).values()).toList().get(index); } @@ -189,13 +217,24 @@ EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object return ctx; } + /** + * Compute the query to retrieve linked documents. + * + * @param property must not be {@literal null}. + * @param value must not be {@literal null}. + * @param spELContext must not be {@literal null}. + * @return never {@literal null}. + */ @SuppressWarnings("unchecked") DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) { - DocumentReference documentReference = property.getDocumentReference(); + DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference() + : ReferenceEmulatingDocumentReference.INSTANCE; + String lookup = documentReference.lookup(); - Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext), () -> null); + Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext), + () -> new Document()); if (property.isCollectionLike() && value instanceof Collection) { @@ -225,45 +264,94 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object va return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, value, spELContext)), sort); } + enum ReferenceEmulatingDocumentReference implements DocumentReference { + + INSTANCE; + + @Override + public Class annotationType() { + return DocumentReference.class; + } + + @Override + public String db() { + return ""; + } + + @Override + public String collection() { + return ""; + } + + @Override + public String lookup() { + return "{ 
'_id' : ?#{#target} }"; + } + + @Override + public String sort() { + return ""; + } + + @Override + public boolean lazy() { + return false; + } + } + + /** + * {@link DocumentReferenceQuery} implementation fetching a single {@link Document}. + */ static class SingleDocumentReferenceQuery implements DocumentReferenceQuery { - Document filter; - Document sort; + private final Document query; + private final Document sort; + + public SingleDocumentReferenceQuery(Document query, Document sort) { - public SingleDocumentReferenceQuery(Document filter, Document sort) { - this.filter = filter; + this.query = query; this.sort = sort; } @Override - public Bson getFilter() { - return filter; + public Bson getQuery() { + return query; + } + + @Override + public Document getSort() { + return sort; } @Override public Iterable apply(MongoCollection collection) { - Document result = collection.find(getFilter()).limit(1).first(); + Document result = collection.find(getQuery()).sort(getSort()).limit(1).first(); return result != null ? Collections.singleton(result) : Collections.emptyList(); } } + /** + * {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a + * {@link Map} structure. Restores the original map order by matching individual query documents against the actual + * values. 
+ */ static class MapDocumentReferenceQuery implements DocumentReferenceQuery { - private final Document filter; + private final Document query; private final Document sort; private final Map filterOrderMap; - public MapDocumentReferenceQuery(Document filter, Document sort, Map filterOrderMap) { + public MapDocumentReferenceQuery(Document query, Document sort, Map filterOrderMap) { - this.filter = filter; + this.query = query; this.sort = sort; this.filterOrderMap = filterOrderMap; } @Override - public Bson getFilter() { - return filter; + public Bson getQuery() { + return query; } @Override @@ -289,33 +377,38 @@ public Iterable restoreOrder(Iterable documents) { } } + /** + * {@link DocumentReferenceQuery} implementation to retrieve linked {@link Document documents} stored inside a + * {@link Collection} like structure. Restores the original order by matching individual query documents against the + * actual values. + */ static class ListDocumentReferenceQuery implements DocumentReferenceQuery { - private final Document filter; + private final Document query; private final Document sort; - public ListDocumentReferenceQuery(Document filter, Document sort) { + public ListDocumentReferenceQuery(Document query, Document sort) { - this.filter = filter; + this.query = query; this.sort = sort; } @Override public Iterable restoreOrder(Iterable documents) { - if (filter.containsKey("$or")) { - List ors = filter.get("$or", List.class); - List target = documents instanceof List ? (List) documents - : Streamable.of(documents).toList(); - return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)) - .collect(Collectors.toList()); + List target = documents instanceof List ? 
(List) documents + : Streamable.of(documents).toList(); + + if (!sort.isEmpty() || !query.containsKey("$or")) { + return target; } - return documents; + List ors = query.get("$or", List.class); + return target.stream().sorted((o1, o2) -> compareAgainstReferenceIndex(ors, o1, o2)).collect(Collectors.toList()); } - public Document getFilter() { - return filter; + public Document getQuery() { + return query; } @Override @@ -339,9 +432,18 @@ int compareAgainstReferenceIndex(List referenceList, Document document } } + /** + * The function that can execute a given {@link DocumentReferenceQuery} within the {@link ReferenceCollection} to + * obtain raw results. + */ @FunctionalInterface interface LookupFunction { + /** + * @param referenceQuery never {@literal null}. + * @param referenceCollection never {@literal null}. + * @return never {@literal null}. + */ Iterable apply(DocumentReferenceQuery referenceQuery, ReferenceCollection referenceCollection); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java index dae2043b4b..91235b5270 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -23,47 +24,92 @@ import com.mongodb.DBRef; /** + * The {@link ReferenceResolver} allows to load and convert linked entities. 
+ * * @author Christoph Strobl */ public interface ReferenceResolver { + /** + * Resolve the association defined via the given property from a given source value. May deliver a + * {@link LazyLoadingProxy proxy instance} in case of a lazy loading association. + * + * @param property the association defining property. + * @param source the association source value. + * @param referenceLookupDelegate the lookup executing component. + * @param entityReader conversion function capable of constructing entities from raw source. + * @return can be {@literal null}. + */ @Nullable Object resolveReference(MongoPersistentProperty property, Object source, ReferenceLookupDelegate referenceLookupDelegate, MongoEntityReader entityReader); - ReferenceLoader getReferenceLoader(); - + /** + * {@link ReferenceCollection} is a value object that contains information about the target database and collection + * name of an association. + */ class ReferenceCollection { - @Nullable + @Nullable // private final String database; private final String collection; + /** + * @param database can be {@literal null} to indicate the configured default + * {@link MongoDatabaseFactory#getMongoDatabase() database} should be used. + * @param collection the target collection name. Must not be {@literal null}. + */ public ReferenceCollection(@Nullable String database, String collection) { - Assert.hasText(collection, "Collection must not be empty or null"); + Assert.hasText(collection, "Collection must not be empty or null!"); this.database = database; this.collection = collection; } - static ReferenceCollection fromDBRef(DBRef dbRef) { + /** + * Create a new instance of {@link ReferenceCollection} from the given {@link DBRef}. + * + * @param dbRef must not be {@literal null}. + * @return new instance of {@link ReferenceCollection}. 
+ */ + public static ReferenceCollection fromDBRef(DBRef dbRef) { return new ReferenceCollection(dbRef.getDatabaseName(), dbRef.getCollectionName()); } + /** + * Get the target collection name. + * + * @return never {@literal null}. + */ public String getCollection() { return collection; } + /** + * Get the target database name. If {@literal null} the default database should be used. + * + * @return can be {@literal null}. + */ @Nullable public String getDatabase() { return database; } } - + /** + * Domain type conversion callback interface that allows to read + */ @FunctionalInterface interface MongoEntityReader { - Object read(Object source, TypeInformation property); + + /** + * Read values from the given source into an object defined via the given {@link TypeInformation}. + * + * @param source never {@literal null}. + * @param typeInformation information abount the desired target type. + * @return never {@literal null}. + */ + Object read(Object source, TypeInformation typeInformation); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index f4bd9436e7..4d51af7dee 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -349,7 +349,13 @@ public static String toJson(@Nullable Document source) { * @since 3.0 */ public static boolean isJsonDocument(@Nullable String value) { - return StringUtils.hasText(value) && (value.startsWith("{") && value.endsWith("}")); + + if(!StringUtils.hasText(value)) { + return false; + } + + String potentialJson = value.trim(); + return potentialJson.startsWith("{") && potentialJson.endsWith("}"); } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index 593e1cd3cf..2c1caf316a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -38,6 +38,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.Reference; import org.springframework.data.convert.WritingConverter; import org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; import org.springframework.data.mongodb.core.mapping.DocumentPointer; @@ -380,6 +381,33 @@ void readCollectionObjectReferenceFromDocumentDeclaringCollectionName() { new ObjectRefOfDocumentWithEmbeddedCollectionName("ref-1", "me-the-1-referenced-object")); } + @Test // GH-3602 + void useOrderFromAnnotatedSort() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + String refCollectionName = template.getCollectionName(SimpleObjectRef.class); + Document refSource1 = new Document("_id", "ref-1").append("value", "me-the-1-referenced-object"); + Document refSource2 = new Document("_id", "ref-2").append("value", "me-the-2-referenced-object"); + Document refSource3 = new Document("_id", "ref-3").append("value", "me-the-3-referenced-object"); + Document source = new Document("_id", "id-1").append("value", "v1").append("simpleSortedValueRef", + Arrays.asList("ref-1", "ref-3", "ref-2")); + + template.execute(db -> { + + db.getCollection(refCollectionName).insertOne(refSource1); + db.getCollection(refCollectionName).insertOne(refSource2); + db.getCollection(refCollectionName).insertOne(refSource3); + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot 
result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.getSimpleSortedValueRef()).containsExactly( + new SimpleObjectRef("ref-3", "me-the-3-referenced-object"), + new SimpleObjectRef("ref-2", "me-the-2-referenced-object"), + new SimpleObjectRef("ref-1", "me-the-1-referenced-object")); + } + @Test // GH-3602 void readObjectReferenceFromDocumentNotRelatingToTheIdProperty() { @@ -857,7 +885,8 @@ void updateDerivedMappingFromLookup() { template.save(book); - template.update(Book.class).matching(where("id").is(book.id)).apply(new Update().set("publisher", publisher)).first(); + template.update(Book.class).matching(where("id").is(book.id)).apply(new Update().set("publisher", publisher)) + .first(); Document target = template.execute(db -> { return db.getCollection(template.getCollectionName(Book.class)).find(Filters.eq("_id", book.id)).first(); @@ -890,6 +919,56 @@ void queryDerivedMappingFromLookup() { assertThat(result.publisher).isNotNull(); } + @Test // GH-3602 + void allowsDirectUsageOfAtReference() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + UsingAtReference root = new UsingAtReference(); + root.id = "book-1"; + root.publisher = publisher; + + template.save(root); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("publisher", "p-1"); + + UsingAtReference result = template.findOne(query(where("id").is(root.id)), UsingAtReference.class); + assertThat(result.publisher).isNotNull(); + } + + @Test // GH-3602 + void updateWhenUsingAtReferenceDirectly() { + + Publisher publisher = new Publisher(); + publisher.id = "p-1"; + publisher.acronym = "TOR"; + publisher.name = "Tom Doherty Associates"; + + template.save(publisher); + + 
UsingAtReference root = new UsingAtReference(); + root.id = "book-1"; + + template.save(root); + template.update(UsingAtReference.class).matching(where("id").is(root.id)).apply(new Update().set("publisher", publisher)).first(); + + Document target = template.execute(db -> { + return db.getCollection(template.getCollectionName(UsingAtReference.class)).find(Filters.eq("_id", root.id)).first(); + }); + + assertThat(target).containsEntry("publisher", "p-1"); + + } + @Data static class SingleRefRoot { @@ -930,6 +1009,9 @@ static class CollectionRefRoot { @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // List simpleValueRef; + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", sort = "{ '_id' : -1 } ") // + List simpleSortedValueRef; + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // Map mapValueRef; @@ -1051,7 +1133,8 @@ public DocumentPointer convert(SimpleObjectRefWithReadingConverter sou static class WithRefA/* to B */ implements ReferenceAble { @Id String id; - @DocumentReference WithRefB toB; + @DocumentReference // + WithRefB toB; @Override public Object toReference() { @@ -1065,9 +1148,11 @@ public Object toReference() { static class WithRefB/* to A */ implements ReferenceAble { @Id String id; - @DocumentReference(lazy = true) WithRefA lazyToA; + @DocumentReference(lazy = true) // + WithRefA lazyToA; - @DocumentReference WithRefA eagerToA; + @DocumentReference // + WithRefA eagerToA; @Override public Object toReference() { @@ -1091,7 +1176,8 @@ static class Book { String id; - @DocumentReference(lookup = "{ 'acronym' : ?#{acc}, 'name' : ?#{n} }") Publisher publisher; + @DocumentReference(lookup = "{ 'acronym' : ?#{acc}, 'name' : ?#{n} }") // + Publisher publisher; } @@ -1102,4 +1188,13 @@ static class Publisher { String name; } + @Data + static class UsingAtReference { + + String id; + + @Reference // + Publisher publisher; + } + } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java new file mode 100644 index 0000000000..6990ddfe88 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactoryUnitTests.java @@ -0,0 +1,139 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.convert; + +import static org.assertj.core.api.Assertions.*; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.mongodb.core.convert.DocumentPointerFactory.LinkageDocument; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; + +/** + * @author Christoph Strobl + */ +public class DocumentPointerFactoryUnitTests { + + @Test // GH-3602 + void errorsOnMongoOperatorUsage() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : { '$eq' : 1 } }"); + + assertThatExceptionOfType(InvalidDataAccessApiUsageException.class) + .isThrownBy(() -> getPointerValue(source, new Book())) // + .withMessageContaining("$eq"); + } + + @Test // GH-3602 + void computesStaticPointer() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : 1 }"); + + assertThat(getPointerValue(source, new Book())).isEqualTo(new Document("_id", 1)); + } + + @Test // GH-3602 + void computesPointerWithIdValuePlaceholder() { + + LinkageDocument source = LinkageDocument.from("{ '_id' : ?#{id} }"); + + assertThat(getPointerValue(source, new Book("book-1", null, null))).isEqualTo(new Document("id", "book-1")); + } + + @Test // GH-3602 + void computesPointerForNonIdValuePlaceholder() { + + LinkageDocument source = LinkageDocument.from("{ 'title' : ?#{book_title} }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null))) + .isEqualTo(new Document("book_title", "Living With A Seal")); + } + + @Test // GH-3602 + void computesPlaceholderFromNestedPathValue() { + + LinkageDocument source = LinkageDocument.from("{ 'metadata.pages' : ?#{p} } }"); + + assertThat(getPointerValue(source, new 
Book("book-1", "Living With A Seal", null, new Metadata(272)))) + .isEqualTo(new Document("p", 272)); + } + + @Test // GH-3602 + void computesNestedPlaceholderPathValue() { + + LinkageDocument source = LinkageDocument.from("{ 'metadata' : { 'pages' : ?#{metadata.pages} } }"); + + assertThat(getPointerValue(source, new Book("book-1", "Living With A Seal", null, new Metadata(272)))) + .isEqualTo(new Document("metadata", new Document("pages", 272))); + } + + Object getPointerValue(LinkageDocument linkageDocument, Object value) { + + MongoMappingContext mappingContext = new MongoMappingContext(); + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(value.getClass()); + return linkageDocument + .getDocumentPointer(mappingContext, persistentEntity, persistentEntity.getPropertyPathAccessor(value)) + .getPointer(); + } + + @Data + @AllArgsConstructor + @NoArgsConstructor + static class Book { + String id; + String title; + List author; + Metadata metadata; + + public Book(String id, String title, List author) { + this.id = id; + this.title = title; + this.author = author; + } + } + + static class Metadata { + + int pages; + + public Metadata(int pages) { + this.pages = pages; + } + + public int getPages() { + return pages; + } + + public void setPages(int pages) { + this.pages = pages; + } + } + + @Data + static class Author { + String id; + String firstname; + String lastname; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index a3836fd8b3..bd3e98788f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -580,9 +580,9 @@ void 
writesMapsOfObjectsCorrectly() { org.bson.Document map = (org.bson.Document) field; Object foo = map.get("Foo"); - assertThat(foo).isInstanceOf(BasicDBList.class); + assertThat(foo).isInstanceOf(List.class); - BasicDBList value = (BasicDBList) foo; + List value = (List) foo; assertThat(value.size()).isEqualTo(1); assertThat(value.get(0)).isEqualTo("Bar"); } @@ -695,9 +695,9 @@ void writesPlainMapOfCollectionsCorrectly() { assertThat(result.containsKey("Foo")).isTrue(); assertThat(result.get("Foo")).isNotNull(); - assertThat(result.get("Foo")).isInstanceOf(BasicDBList.class); + assertThat(result.get("Foo")).isInstanceOf(List.class); - BasicDBList list = (BasicDBList) result.get("Foo"); + List list = (List) result.get("Foo"); assertThat(list.size()).isEqualTo(1); assertThat(list.get(0)).isEqualTo(Locale.US.toString()); @@ -744,7 +744,7 @@ void writesArraysAsMapValuesCorrectly() { org.bson.Document map = (org.bson.Document) mapObject; Object valueObject = map.get("foo"); - assertThat(valueObject).isInstanceOf(BasicDBList.class); + assertThat(valueObject).isInstanceOf(List.class); List list = (List) valueObject; assertThat(list.size()).isEqualTo(1); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java index 8a462a9370..be9335f2eb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/performance/ReactivePerformanceTests.java @@ -129,10 +129,6 @@ public List bulkFetch(List dbRefs) { return null; } - @Override - public ReferenceLoader getReferenceLoader() { - return null; - } }, context); operations = new ReactiveMongoTemplate(mongoDbFactory, converter); diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index 
1998fe1ad8..9f9a461ee6 100644 --- a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -902,6 +902,10 @@ It is possible to alter resolution defaults (listed below) via the attributes of | The single document lookup query evaluating placeholders via SpEL expressions using `#target` as the marker for a given source value. `Collection` like or `Map` properties combine individual lookups via an `$or` operator. | An `_id` field based query (`{ '_id' : ?#{#target} }`) using the loaded source value. +| `sort` +| Used for sorting result documents on server side. +| None by default. Result order of `Collection` like properties is restored based on the used lookup query. + | `lazy` | If set to `true` value resolution is delayed upon first access of the property. | Resolves properties eagerly by default. @@ -1182,7 +1186,7 @@ We know it is tempting to use all kinds of MongoDB query operators in the lookup * Mind that resolution takes time and consider a lazy strategy. * A collection of document references is bulk loaded using an `$or` operator. + The original element order is restored in memory which cannot be done when using MongoDB query operators. -In this case Results will be ordered as they are received from the store. +In this case Results will be ordered as they are received from the store or via the provided `@DocumentReference(sort = ...)` attribute. And a few more general remarks: From 5ab75eb65aada8c3989b99b2441810e2e16204a0 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 21 May 2021 10:55:59 +0200 Subject: [PATCH 020/983] Polishing. Reduce dependencies in tests by using NoOpDbRefResolver. Add since tags. Tweak documentation. Extract entity references into own documentation fragment. Original pull request: #3647. Closes #3602. 
--- .../core/convert/MappingMongoConverter.java | 2 - .../MongoDatabaseFactoryReferenceLoader.java | 3 +- .../mongodb/core/convert/ReferenceLoader.java | 3 +- .../core/convert/ReferenceLookupDelegate.java | 1 + .../mongodb/core/mapping/DocumentPointer.java | 5 +- .../core/mapping/DocumentReference.java | 64 +-- .../MongoTransactionManagerUnitTests.java | 28 +- .../mongodb/core/CountQueryUnitTests.java | 33 +- .../MongoTemplateDocumentReferenceTests.java | 8 +- .../FilterExpressionUnitTests.java | 5 +- .../convert/CustomConvertersUnitTests.java | 3 +- .../convert/MappingMongoConverterTests.java | 26 +- .../convert/MongoExampleMapperUnitTests.java | 70 ++- .../core/convert/QueryMapperUnitTests.java | 22 +- .../core/convert/UpdateMapperUnitTests.java | 4 +- .../query/AbstractMongoQueryUnitTests.java | 5 +- .../ConvertingParameterAccessorUnitTests.java | 22 +- .../query/MongoQueryCreatorUnitTests.java | 120 ++--- .../query/PartTreeMongoQueryUnitTests.java | 38 +- .../src/test/resources/logback.xml | 1 - src/main/asciidoc/new-features.adoc | 2 +- .../reference/document-references.adoc | 440 ++++++++++++++++++ src/main/asciidoc/reference/mapping.adoc | 429 +---------------- 23 files changed, 680 insertions(+), 654 deletions(-) create mode 100644 src/main/asciidoc/reference/document-references.adoc diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 87f0adeb62..413ce2ce44 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -546,8 +546,6 @@ private void readAssociation(Association association, P DBRef dbref = value instanceof DBRef ? 
(DBRef) value : null; - // TODO: accessor.setProperty(property, dbRefResolver.resolveReference(property, value, referenceReader, - // context::convert)); accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java index 0973e5a5fb..d68af1fb5a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoDatabaseFactoryReferenceLoader.java @@ -29,8 +29,9 @@ /** * {@link ReferenceLoader} implementation using a {@link MongoDatabaseFactory} to obtain raw {@link Document documents} * for linked entities via a {@link ReferenceLoader.DocumentReferenceQuery}. - * + * * @author Christoph Strobl + * @since 3.3 */ public class MongoDatabaseFactoryReferenceLoader implements ReferenceLoader { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java index 2f96f57da2..70a0f43c0f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLoader.java @@ -28,7 +28,7 @@ /** * The {@link ReferenceLoader} obtains raw {@link Document documents} for linked entities via a * {@link ReferenceLoader.DocumentReferenceQuery}. 
- * + * * @author Christoph Strobl * @since 3.3 */ @@ -79,7 +79,6 @@ default Bson getSort() { return new Document(); } - // TODO: Move apply method into something else that holds the collection and knows about single item/multi-item default Iterable apply(MongoCollection collection) { return restoreOrder(collection.find(getQuery()).sort(getSort())); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index 09f4c1a8ae..616abb325e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -57,6 +57,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @since 3.3 */ public final class ReferenceLookupDelegate { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java index de7fbff866..3b432a8c12 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentPointer.java @@ -17,8 +17,9 @@ /** * A custom pointer to a linked document to be used along with {@link DocumentReference} for storing the linkage value. - * + * * @author Christoph Strobl + * @since 3.3 */ @FunctionalInterface public interface DocumentPointer { @@ -27,7 +28,7 @@ public interface DocumentPointer { * The actual pointer value. This can be any simple type, like a {@link String} or {@link org.bson.types.ObjectId} or * a {@link org.bson.Document} holding more information like the target collection, multiple fields forming the key, * etc. 
- * + * * @return the value stored in MongoDB and used for constructing the {@link DocumentReference#lookup() lookup query}. */ T getPointer(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java index 0846c4022c..6fd5e96877 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/DocumentReference.java @@ -22,13 +22,14 @@ import java.lang.annotation.Target; import org.springframework.data.annotation.Reference; +import org.springframework.data.mongodb.MongoDatabaseFactory; /** - * A {@link DocumentReference} offers an alternative way of linking entities in MongoDB. While the goal is the same as - * when using {@link DBRef}, the store representation is different and can be literally anything, a single value, an - * entire {@link org.bson.Document}, basically everything that can be stored in MongoDB. By default, the mapping layer - * will use the referenced entities {@literal id} value for storage and retrieval. - * + * A {@link DocumentReference} allows referencing entities in MongoDB using a flexible schema. While the goal is the + * same as when using {@link DBRef}, the store representation is different. The reference can be anything, a single + * value, an entire {@link org.bson.Document}, basically everything that can be stored in MongoDB. By default, the + * mapping layer will use the referenced entities {@literal id} value for storage and retrieval. + * *
          * public class Account {
          *   private String id;
        @@ -40,7 +41,7 @@
          *   @DocumentReference
          *   private List<Account> accounts;
          * }
        - * 
        + *
          * Account account = ...
          *
          * mongoTemplate.insert(account);
        @@ -50,43 +51,41 @@
          *   .apply(new Update().push("accounts").value(account))
          *   .first();
          * 
        - * - * {@link #lookup()} allows to define custom queries that are independent from the {@literal id} field and in - * combination with {@link org.springframework.data.convert.WritingConverter writing converters} offer a flexible way of - * defining links between entities. - * + * + * {@link #lookup()} allows defining a query filter that is independent from the {@literal _id} field and in combination + * with {@link org.springframework.data.convert.WritingConverter writing converters} offers a flexible way of defining + * references between entities. + * *
          * public class Book {
        - * 	 private ObjectId id;
        - * 	 private String title;
        + * 	private ObjectId id;
        + * 	private String title;
          *
        - * 	 @Field("publisher_ac")
        - * 	 @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }")
        - * 	 private Publisher publisher;
        + * 	@Field("publisher_ac") @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
          * }
          *
          * public class Publisher {
          *
        - * 	 private ObjectId id;
        - * 	 private String acronym;
        - * 	 private String name;
        + * 	private ObjectId id;
        + * 	private String acronym;
        + * 	private String name;
          *
        - * 	 @DocumentReference(lazy = true)
        - * 	 private List<Book> books;
        + * 	@DocumentReference(lazy = true) private List<Book> books;
          * }
          *
          * @WritingConverter
          * public class PublisherReferenceConverter implements Converter<Publisher, DocumentPointer<String>> {
          *
        - *    public DocumentPointer<String> convert(Publisher source) {
        + * 	public DocumentPointer<String> convert(Publisher source) {
          * 		return () -> source.getAcronym();
        - *    }
        + * 	}
          * }
          * 
        * * @author Christoph Strobl * @since 3.3 - * @see MongoDB Reference Documentation + * @see MongoDB + * Reference Documentation */ @Documented @Retention(RetentionPolicy.RUNTIME) @@ -95,22 +94,25 @@ public @interface DocumentReference { /** - * The database the linked entity resides in. + * The database the referenced entity resides in. Uses the default database provided by + * {@link org.springframework.data.mongodb.MongoDatabaseFactory} if empty. * - * @return empty String by default. Uses the default database provided buy the {@link org.springframework.data.mongodb.MongoDatabaseFactory}. + * @see MongoDatabaseFactory#getMongoDatabase() + * @see MongoDatabaseFactory#getMongoDatabase(String) */ String db() default ""; /** - * The database the linked entity resides in. + * The collection the referenced entity resides in. Defaults to the collection of the referenced entity type. * - * @return empty String by default. Uses the property type for collection resolution. + * @see MongoPersistentEntity#getCollection() */ String collection() default ""; /** - * The single document lookup query. In case of an {@link java.util.Collection} or {@link java.util.Map} property - * the individual lookups are combined via an `$or` operator. + * The single document lookup query. In case of an {@link java.util.Collection} or {@link java.util.Map} property the + * individual lookups are combined via an {@code $or} operator. {@code target} points to the source value (or + * document) stored at the reference property. Properties of {@code target} can be used to define the reference query. * * @return an {@literal _id} based lookup. */ @@ -118,8 +120,6 @@ /** * A specific sort. - * - * @return empty String by default. 
*/ String sort() default ""; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java index dfb48fdbb1..bb05a283b2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoTransactionManagerUnitTests.java @@ -25,6 +25,7 @@ import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionStatus; @@ -37,12 +38,15 @@ import com.mongodb.client.ClientSession; import com.mongodb.client.MongoDatabase; import com.mongodb.session.ServerSession; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; /** * @author Christoph Strobl */ @ExtendWith(MockitoExtension.class) -public class MongoTransactionManagerUnitTests { +@MockitoSettings(strictness = Strictness.LENIENT) +class MongoTransactionManagerUnitTests { @Mock ClientSession session; @Mock ClientSession session2; @@ -53,23 +57,25 @@ public class MongoTransactionManagerUnitTests { @Mock MongoDatabase db2; @BeforeEach - public void setUp() { + void setUp() { when(dbFactory.getSession(any())).thenReturn(session, session2); + when(dbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + when(dbFactory2.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); when(dbFactory.withSession(session)).thenReturn(dbFactory); when(dbFactory.getMongoDatabase()).thenReturn(db); when(session.getServerSession()).thenReturn(serverSession); } @AfterEach - public void verifyTransactionSynchronizationManager() { + void 
verifyTransactionSynchronizationManager() { assertThat(TransactionSynchronizationManager.getResourceMap().isEmpty()).isTrue(); assertThat(TransactionSynchronizationManager.isSynchronizationActive()).isFalse(); } @Test // DATAMONGO-1920 - public void triggerCommitCorrectly() { + void triggerCommitCorrectly() { MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); @@ -91,7 +97,7 @@ public void triggerCommitCorrectly() { } @Test // DATAMONGO-1920 - public void participateInOnGoingTransactionWithCommit() { + void participateInOnGoingTransactionWithCommit() { MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); @@ -126,7 +132,7 @@ protected void doInTransactionWithoutResult(TransactionStatus status) { } @Test // DATAMONGO-1920 - public void participateInOnGoingTransactionWithRollbackOnly() { + void participateInOnGoingTransactionWithRollbackOnly() { MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); @@ -163,7 +169,7 @@ protected void doInTransactionWithoutResult(TransactionStatus status) { } @Test // DATAMONGO-1920 - public void triggerRollbackCorrectly() { + void triggerRollbackCorrectly() { MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); @@ -185,7 +191,7 @@ public void triggerRollbackCorrectly() { } @Test // DATAMONGO-1920 - public void suspendTransactionWhilePropagationNotSupported() { + void suspendTransactionWhilePropagationNotSupported() { MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); TransactionStatus txStatus = txManager.getTransaction(new DefaultTransactionDefinition()); @@ 
-228,7 +234,7 @@ protected void doInTransactionWithoutResult(TransactionStatus status) { } @Test // DATAMONGO-1920 - public void suspendTransactionWhilePropagationRequiresNew() { + void suspendTransactionWhilePropagationRequiresNew() { when(dbFactory.withSession(session2)).thenReturn(dbFactory2); when(dbFactory2.getMongoDatabase()).thenReturn(db2); @@ -277,7 +283,7 @@ protected void doInTransactionWithoutResult(TransactionStatus status) { } @Test // DATAMONGO-1920 - public void readonlyShouldInitiateASessionStartAndCommitTransaction() { + void readonlyShouldInitiateASessionStartAndCommitTransaction() { MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); @@ -303,7 +309,7 @@ public void readonlyShouldInitiateASessionStartAndCommitTransaction() { } @Test // DATAMONGO-1920 - public void readonlyShouldInitiateASessionStartAndRollbackTransaction() { + void readonlyShouldInitiateASessionStartAndRollbackTransaction() { MongoTransactionManager txManager = new MongoTransactionManager(dbFactory); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java index 4b5ed6f2a8..dcb35a7d4b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/CountQueryUnitTests.java @@ -27,6 +27,7 @@ import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; @@ 
-39,27 +40,25 @@ * @author Mark Paluch * @author Christoph Strobl */ -public class CountQueryUnitTests { +class CountQueryUnitTests { - QueryMapper mapper; - MongoMappingContext context; - MappingMongoConverter converter; - - MongoDatabaseFactory factory = mock(MongoDatabaseFactory.class); + private QueryMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; @BeforeEach - public void setUp() { + void setUp() { this.context = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.afterPropertiesSet(); this.mapper = new QueryMapper(converter); } @Test // DATAMONGO-2059 - public void nearToGeoWithinWithoutDistance() { + void nearToGeoWithinWithoutDistance() { Query source = query(where("location").near(new Point(-73.99171, 40.738868))); org.bson.Document target = postProcessQueryForCount(source); @@ -69,7 +68,7 @@ public void nearToGeoWithinWithoutDistance() { } @Test // DATAMONGO-2059 - public void nearAndExisting$and() { + void nearAndExisting$and() { Query source = query(where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01)) .addCriteria(new Criteria().andOperator(where("foo").is("bar"))); @@ -83,7 +82,7 @@ public void nearToGeoWithinWithoutDistance() { } @Test // DATAMONGO-2059 - public void nearSphereToGeoWithinWithoutDistance() { + void nearSphereToGeoWithinWithoutDistance() { Query source = query(where("location").nearSphere(new Point(-73.99171, 40.738868))); org.bson.Document target = postProcessQueryForCount(source); @@ -93,7 +92,7 @@ public void nearSphereToGeoWithinWithoutDistance() { } @Test // DATAMONGO-2059 - public void nearToGeoWithinWithMaxDistance() { + void nearToGeoWithinWithMaxDistance() { Query source = query(where("location").near(new Point(-73.99171, 40.738868)).maxDistance(10)); org.bson.Document target = 
postProcessQueryForCount(source); @@ -103,7 +102,7 @@ public void nearToGeoWithinWithMaxDistance() { } @Test // DATAMONGO-2059 - public void nearSphereToGeoWithinWithMaxDistance() { + void nearSphereToGeoWithinWithMaxDistance() { Query source = query(where("location").nearSphere(new Point(-73.99171, 40.738868)).maxDistance(10)); org.bson.Document target = postProcessQueryForCount(source); @@ -113,7 +112,7 @@ public void nearSphereToGeoWithinWithMaxDistance() { } @Test // DATAMONGO-2059 - public void nearToGeoWithinWithMinDistance() { + void nearToGeoWithinWithMinDistance() { Query source = query(where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01)); org.bson.Document target = postProcessQueryForCount(source); @@ -124,7 +123,7 @@ public void nearToGeoWithinWithMinDistance() { } @Test // DATAMONGO-2059 - public void nearToGeoWithinWithMaxDistanceAndCombinedWithOtherCriteria() { + void nearToGeoWithinWithMaxDistanceAndCombinedWithOtherCriteria() { Query source = query( where("name").is("food").and("location").near(new Point(-73.99171, 40.738868)).maxDistance(10)); @@ -135,7 +134,7 @@ public void nearToGeoWithinWithMaxDistanceAndCombinedWithOtherCriteria() { } @Test // DATAMONGO-2059 - public void nearToGeoWithinWithMinDistanceOrCombinedWithOtherCriteria() { + void nearToGeoWithinWithMinDistanceOrCombinedWithOtherCriteria() { Query source = query(new Criteria().orOperator(where("name").is("food"), where("location").near(new Point(-73.99171, 40.738868)).minDistance(0.01))); @@ -146,7 +145,7 @@ public void nearToGeoWithinWithMinDistanceOrCombinedWithOtherCriteria() { } @Test // DATAMONGO-2059 - public void nearToGeoWithinWithMaxDistanceOrCombinedWithOtherCriteria() { + void nearToGeoWithinWithMaxDistanceOrCombinedWithOtherCriteria() { Query source = query(new Criteria().orOperator(where("name").is("food"), where("location").near(new Point(-73.99171, 40.738868)).maxDistance(10))); diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index 2c1caf316a..fa1deb4f1c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -1094,17 +1094,17 @@ public Object toReference() { } } - static class ReferencableConverter implements Converter { + static class ReferencableConverter implements Converter> { @Nullable @Override - public DocumentPointer convert(ReferenceAble source) { + public DocumentPointer convert(ReferenceAble source) { return source::toReference; } } @WritingConverter - class DocumentToSimpleObjectRefWithReadingConverter + static class DocumentToSimpleObjectRefWithReadingConverter implements Converter, SimpleObjectRefWithReadingConverter> { @Nullable @@ -1118,7 +1118,7 @@ public SimpleObjectRefWithReadingConverter convert(DocumentPointer sou } @WritingConverter - class SimpleObjectRefWithReadingConverterToDocumentConverter + static class SimpleObjectRefWithReadingConverterToDocumentConverter implements Converter> { @Nullable diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java index 1580b4efb0..a318a5559b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/FilterExpressionUnitTests.java @@ -32,6 +32,7 @@ import org.springframework.data.mongodb.core.DocumentTestUtils; import 
org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; @@ -41,8 +42,6 @@ @ExtendWith(MockitoExtension.class) class FilterExpressionUnitTests { - @Mock MongoDatabaseFactory mongoDbFactory; - private AggregationOperationContext aggregationContext; private MongoMappingContext mappingContext; @@ -51,7 +50,7 @@ void setUp() { mappingContext = new MongoMappingContext(); aggregationContext = new TypeBasedAggregationOperationContext(Sales.class, mappingContext, - new QueryMapper(new MappingMongoConverter(new DefaultDbRefResolver(mongoDbFactory), mappingContext))); + new QueryMapper(new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext))); } @Test // DATAMONGO-1491 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java index 1c1ba0715d..67b51f1140 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/CustomConvertersUnitTests.java @@ -49,7 +49,6 @@ class CustomConvertersUnitTests { @Mock BarToDocumentConverter barToDocumentConverter; @Mock DocumentToBarConverter documentToBarConverter; - @Mock MongoDatabaseFactory mongoDbFactory; private MongoMappingContext context; @@ -67,7 +66,7 @@ void setUp() { context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); context.initialize(); - converter = new MappingMongoConverter(new DefaultDbRefResolver(mongoDbFactory), context); + converter = new 
MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); converter.setCustomConversions(conversions); converter.afterPropertiesSet(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java index 66c2cc9822..2b17ed4b06 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java @@ -59,18 +59,18 @@ @ExtendWith(MongoClientExtension.class) public class MappingMongoConverterTests { - public static final String DATABASE = "mapping-converter-tests"; + private static final String DATABASE = "mapping-converter-tests"; - static @Client MongoClient client; + private static @Client MongoClient client; - MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, DATABASE); + private MongoDatabaseFactory factory = new SimpleMongoClientDatabaseFactory(client, DATABASE); - MappingMongoConverter converter; - MongoMappingContext mappingContext; - DbRefResolver dbRefResolver; + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; + private DbRefResolver dbRefResolver; @BeforeEach - public void setUp() { + void setUp() { MongoDatabase database = client.getDatabase(DATABASE); @@ -90,7 +90,7 @@ public void setUp() { } @Test // DATAMONGO-2004 - public void resolvesLazyDBRefOnAccess() { + void resolvesLazyDBRefOnAccess() { client.getDatabase(DATABASE).getCollection("samples") .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), @@ -102,7 +102,6 @@ public void resolvesLazyDBRefOnAccess() { WithLazyDBRef target = converter.read(WithLazyDBRef.class, source); verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); - 
verifyNoMoreInteractions(dbRefResolver); assertThat(target.lazyList).isInstanceOf(LazyLoadingProxy.class); assertThat(target.getLazyList()).contains(new Sample("sample-1", "one"), new Sample("sample-2", "two")); @@ -111,7 +110,7 @@ public void resolvesLazyDBRefOnAccess() { } @Test // DATAMONGO-2004 - public void resolvesLazyDBRefConstructorArgOnAccess() { + void resolvesLazyDBRefConstructorArgOnAccess() { client.getDatabase(DATABASE).getCollection("samples") .insertMany(Arrays.asList(new Document("_id", "sample-1").append("value", "one"), @@ -123,7 +122,6 @@ public void resolvesLazyDBRefConstructorArgOnAccess() { WithLazyDBRefAsConstructorArg target = converter.read(WithLazyDBRefAsConstructorArg.class, source); verify(dbRefResolver).resolveDbRef(any(), isNull(), any(), any()); - verifyNoMoreInteractions(dbRefResolver); assertThat(target.lazyList).isInstanceOf(LazyLoadingProxy.class); assertThat(target.getLazyList()).contains(new Sample("sample-1", "one"), new Sample("sample-2", "two")); @@ -132,7 +130,7 @@ public void resolvesLazyDBRefConstructorArgOnAccess() { } @Test // DATAMONGO-2400 - public void readJavaTimeValuesWrittenViaCodec() { + void readJavaTimeValuesWrittenViaCodec() { configureConverterWithNativeJavaTimeCodec(); MongoCollection mongoCollection = client.getDatabase(DATABASE).getCollection("java-time-types"); @@ -160,7 +158,7 @@ public static class WithLazyDBRef { @Id String id; @DBRef(lazy = true) List lazyList; - public List getLazyList() { + List getLazyList() { return lazyList; } } @@ -176,7 +174,7 @@ public WithLazyDBRefAsConstructorArg(String id, List lazyList) { this.lazyList = lazyList; } - public List getLazyList() { + List getLazyList() { return lazyList; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java index ef92b8ff0c..796eecc7f7 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MongoExampleMapperUnitTests.java @@ -54,27 +54,25 @@ * @author Mark Paluch */ @ExtendWith(MockitoExtension.class) -public class MongoExampleMapperUnitTests { +class MongoExampleMapperUnitTests { - MongoExampleMapper mapper; - MongoMappingContext context; - MappingMongoConverter converter; - - @Mock MongoDatabaseFactory factory; + private MongoExampleMapper mapper; + private MongoMappingContext context; + private MappingMongoConverter converter; @BeforeEach - public void setUp() { + void setUp() { this.context = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.afterPropertiesSet(); this.mapper = new MongoExampleMapper(converter); } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsSet() { FlatDocument probe = new FlatDocument(); probe.id = "steelheart"; @@ -84,7 +82,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsSet() { } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenMultipleValuesSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenMultipleValuesSet() { FlatDocument probe = new FlatDocument(); probe.id = "steelheart"; @@ -98,7 +96,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenMultipleValuesSet() { } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsNotSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsNotSet() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -110,7 +108,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIdIsNotSet() { } 
@Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenListHasValues() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenListHasValues() { FlatDocument probe = new FlatDocument(); probe.listOfString = Arrays.asList("Prof", "Tia", "David"); @@ -122,7 +120,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenListHasValues() { } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenFieldNameIsCustomized() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenFieldNameIsCustomized() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "Mitosis"; @@ -132,7 +130,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenFieldNameIsCustomized() } @Test // DATAMONGO-1245 - public void typedExampleShouldContainTypeRestriction() { + void typedExampleShouldContainTypeRestriction() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -146,7 +144,7 @@ public void typedExampleShouldContainTypeRestriction() { } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedAsFlatMapWhenGivenNestedElementsWithLenientMatchMode() { + void exampleShouldBeMappedAsFlatMapWhenGivenNestedElementsWithLenientMatchMode() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -157,7 +155,7 @@ public void exampleShouldBeMappedAsFlatMapWhenGivenNestedElementsWithLenientMatc } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictMatchMode() { + void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictMatchMode() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -170,7 +168,7 @@ public void exampleShouldBeMappedAsExactObjectWhenGivenNestedElementsWithStrictM } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarting() { + void 
exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarting() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -184,7 +182,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsStarti } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMatchModeIsStarting() { + void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMatchModeIsStarting() { FlatDocument probe = new FlatDocument(); probe.stringValue = "fire.ight"; @@ -198,7 +196,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeContainingDotsWhenStringMat } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -212,7 +210,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeIsEnding } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -226,7 +224,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenStringMatchModeRegex() } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMatchModeSet() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMatchModeSet() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -240,7 +238,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabledAndMat } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { + void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { FlatDocument probe = new FlatDocument(); 
probe.stringValue = "firefight"; @@ -255,7 +253,7 @@ public void exampleShouldBeMappedCorrectlyForFlatTypeWhenIgnoreCaseEnabled() { } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedWhenContainingDBRef() { + void exampleShouldBeMappedWhenContainingDBRef() { FlatDocument probe = new FlatDocument(); probe.stringValue = "steelheart"; @@ -271,7 +269,7 @@ public void exampleShouldBeMappedWhenContainingDBRef() { } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedWhenDBRefIsNull() { + void exampleShouldBeMappedWhenDBRefIsNull() { FlatDocument probe = new FlatDocument(); probe.stringValue = "steelheart"; @@ -283,7 +281,7 @@ public void exampleShouldBeMappedWhenDBRefIsNull() { } @Test // DATAMONGO-1245 - public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { + void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { ClassWithGeoTypes probe = new ClassWithGeoTypes(); probe.legacyPoint = new Point(10D, 20D); @@ -296,7 +294,7 @@ public void exampleShouldBeMappedCorrectlyWhenContainingLegacyPoint() { } @Test // DATAMONGO-1245 - public void mappingShouldExcludeFieldWithCustomNameCorrectly() { + void mappingShouldExcludeFieldWithCustomNameCorrectly() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "foo"; @@ -311,7 +309,7 @@ public void mappingShouldExcludeFieldWithCustomNameCorrectly() { } @Test // DATAMONGO-1245 - public void mappingShouldExcludeFieldCorrectly() { + void mappingShouldExcludeFieldCorrectly() { FlatDocument probe = new FlatDocument(); probe.customNamedField = "foo"; @@ -326,7 +324,7 @@ public void mappingShouldExcludeFieldCorrectly() { } @Test // DATAMONGO-1245 - public void mappingShouldExcludeNestedFieldCorrectly() { + void mappingShouldExcludeNestedFieldCorrectly() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -342,7 +340,7 @@ public void mappingShouldExcludeNestedFieldCorrectly() { } @Test // DATAMONGO-1245 - public void 
mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { + void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -358,7 +356,7 @@ public void mappingShouldExcludeNestedFieldWithCustomNameCorrectly() { } @Test // DATAMONGO-1245 - public void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMatcher() { + void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMatcher() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -372,7 +370,7 @@ public void mappingShouldFavorFieldSpecificationStringMatcherOverDefaultStringMa } @Test // DATAMONGO-1245 - public void mappingShouldIncludePropertiesFromHierarchicalDocument() { + void mappingShouldIncludePropertiesFromHierarchicalDocument() { HierachicalDocument probe = new HierachicalDocument(); probe.stringValue = "firefight"; @@ -386,7 +384,7 @@ public void mappingShouldIncludePropertiesFromHierarchicalDocument() { } @Test // DATAMONGO-1459 - public void mapsAnyMatchingExampleCorrectly() { + void mapsAnyMatchingExampleCorrectly() { FlatDocument probe = new FlatDocument(); probe.stringValue = "firefight"; @@ -398,7 +396,7 @@ public void mapsAnyMatchingExampleCorrectly() { } @Test // DATAMONGO-1768 - public void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPath() { + void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPath() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); @@ -411,13 +409,13 @@ public void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPath() { } @Test // DATAMONGO-1768 - public void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPathWhenUsingCustomTypeMapper() { + void allowIgnoringTypeRestrictionBySettingUpTypeKeyAsAnIgnoredPathWhenUsingCustomTypeMapper() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); probe.flatDoc.stringValue = "conflux"; - 
MappingMongoConverter mappingMongoConverter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + MappingMongoConverter mappingMongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); mappingMongoConverter.setTypeMapper(new DefaultMongoTypeMapper() { @Override @@ -445,7 +443,7 @@ public void writeType(TypeInformation info, Bson sink) { } @Test // DATAMONGO-1768 - public void untypedExampleShouldNotInferTypeRestriction() { + void untypedExampleShouldNotInferTypeRestriction() { WrapperDocument probe = new WrapperDocument(); probe.flatDoc = new FlatDocument(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index d371b32c12..f7b5ec76d7 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -34,14 +34,13 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.data.annotation.Id; import org.springframework.data.annotation.Transient; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; @@ -80,14 +79,12 @@ public class QueryMapperUnitTests { private MongoMappingContext context; private MappingMongoConverter converter; - @Mock MongoDatabaseFactory factory; - @BeforeEach void 
beforeEach() { this.context = new MongoMappingContext(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.afterPropertiesSet(); this.mapper = new QueryMapper(converter); @@ -1502,19 +1499,4 @@ static class WithDocumentReferences { } - // TODO - @Test - void xxx() { - - Sample sample = new Sample(); - sample.foo = "sample-id"; - - Query query = query(where("sample").is(sample)); - - org.bson.Document mappedObject = mapper.getMappedObject(query.getQueryObject(), - context.getPersistentEntity(WithDocumentReferences.class)); - - System.out.println("mappedObject.toJson(): " + mappedObject.toJson()); - } - } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java index f5b5493327..a8d5f12b9f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java @@ -48,6 +48,7 @@ import org.springframework.data.mapping.MappingException; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.Unwrapped; @@ -70,7 +71,6 @@ @ExtendWith(MockitoExtension.class) class UpdateMapperUnitTests { - @Mock MongoDatabaseFactory factory; private MappingMongoConverter converter; private MongoMappingContext context; private UpdateMapper mapper; @@ -88,7 +88,7 @@ void setUp() { 
this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); this.context.initialize(); - this.converter = new MappingMongoConverter(new DefaultDbRefResolver(factory), context); + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); this.converter.setCustomConversions(conversions); this.converter.afterPropertiesSet(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java index c2803b6124..92c99185eb 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/AbstractMongoQueryUnitTests.java @@ -45,6 +45,7 @@ import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.convert.DbRefResolver; @@ -93,7 +94,9 @@ void setUp() { doReturn(persitentEntityMock).when(mappingContextMock).getRequiredPersistentEntity(Mockito.any(Class.class)); doReturn(Person.class).when(persitentEntityMock).getType(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mock(MongoDatabaseFactory.class)); + MongoDatabaseFactory mongoDbFactory = mock(MongoDatabaseFactory.class); + when(mongoDbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); + DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory); MappingMongoConverter converter = new 
MappingMongoConverter(dbRefResolver, mappingContextMock); converter.afterPropertiesSet(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java index 87994bcbec..1624f40d77 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/ConvertingParameterAccessorUnitTests.java @@ -28,6 +28,7 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; @@ -46,35 +47,36 @@ * @author Christoph Strobl */ @ExtendWith(MockitoExtension.class) -public class ConvertingParameterAccessorUnitTests { +class ConvertingParameterAccessorUnitTests { @Mock MongoDatabaseFactory factory; @Mock MongoParameterAccessor accessor; - MongoMappingContext context; - MappingMongoConverter converter; - DbRefResolver resolver; + private MongoMappingContext context; + private MappingMongoConverter converter; + private DbRefResolver resolver; @BeforeEach - public void setUp() { + void setUp() { + when(factory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); this.context = new MongoMappingContext(); this.resolver = new DefaultDbRefResolver(factory); this.converter = new MappingMongoConverter(resolver, context); } @Test - public void rejectsNullDbRefResolver() { + void rejectsNullDbRefResolver() { assertThatIllegalArgumentException().isThrownBy(() -> new 
MappingMongoConverter((DbRefResolver) null, context)); } @Test - public void rejectsNullContext() { + void rejectsNullContext() { assertThatIllegalArgumentException().isThrownBy(() -> new MappingMongoConverter(resolver, null)); } @Test - public void convertsCollectionUponAccess() { + void convertsCollectionUponAccess() { when(accessor.getBindableValue(0)).thenReturn(Arrays.asList("Foo")); @@ -88,7 +90,7 @@ public void convertsCollectionUponAccess() { } @Test // DATAMONGO-505 - public void convertsAssociationsToDBRef() { + void convertsAssociationsToDBRef() { Property property = new Property(); property.id = 5L; @@ -102,7 +104,7 @@ public void convertsAssociationsToDBRef() { } @Test // DATAMONGO-505 - public void convertsAssociationsToDBRefForCollections() { + void convertsAssociationsToDBRefForCollections() { Property property = new Property(); property.id = 5L; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java index 12446f7e0f..02d4b7bc09 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/MongoQueryCreatorUnitTests.java @@ -38,12 +38,14 @@ import org.springframework.data.geo.Shape; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import org.springframework.data.mongodb.core.Person; import org.springframework.data.mongodb.core.Venue; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import 
org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.geo.GeoJsonLineString; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.index.GeoSpatialIndexType; @@ -67,22 +69,20 @@ * @author Thomas Darimont * @author Christoph Strobl */ -public class MongoQueryCreatorUnitTests { +class MongoQueryCreatorUnitTests { - MappingContext, MongoPersistentProperty> context; - MongoConverter converter; + private MappingContext, MongoPersistentProperty> context; + private MongoConverter converter; @BeforeEach - public void beforeEach() { + void beforeEach() { context = new MongoMappingContext(); - - DbRefResolver resolver = new DefaultDbRefResolver(mock(MongoDatabaseFactory.class)); - converter = new MappingMongoConverter(resolver, context); + converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); } @Test - public void createsQueryCorrectly() { + void createsQueryCorrectly() { PartTree tree = new PartTree("findByFirstName", Person.class); @@ -92,7 +92,7 @@ public void createsQueryCorrectly() { } @Test // DATAMONGO-469 - public void createsAndQueryCorrectly() { + void createsAndQueryCorrectly() { Person person = new Person(); MongoQueryCreator creator = new MongoQueryCreator(new PartTree("findByFirstNameAndFriend", Person.class), @@ -103,7 +103,7 @@ public void createsAndQueryCorrectly() { } @Test - public void createsNotNullQueryCorrectly() { + void createsNotNullQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameNotNull", Person.class); Query query = new MongoQueryCreator(tree, getAccessor(converter), context).createQuery(); @@ -112,7 +112,7 @@ public void createsNotNullQueryCorrectly() { } @Test - public void createsIsNullQueryCorrectly() { + void createsIsNullQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameIsNull", Person.class); Query query = new 
MongoQueryCreator(tree, getAccessor(converter), context).createQuery(); @@ -121,7 +121,7 @@ public void createsIsNullQueryCorrectly() { } @Test - public void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception { + void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception { Point point = new Point(10, 20); Distance distance = new Distance(2.5, Metrics.KILOMETERS); @@ -132,7 +132,7 @@ public void bindsMetricDistanceParameterToNearSphereCorrectly() throws Exception } @Test - public void bindsDistanceParameterToNearCorrectly() throws Exception { + void bindsDistanceParameterToNearCorrectly() throws Exception { Point point = new Point(10, 20); Distance distance = new Distance(2.5); @@ -143,7 +143,7 @@ public void bindsDistanceParameterToNearCorrectly() throws Exception { } @Test - public void createsLessThanEqualQueryCorrectly() { + void createsLessThanEqualQueryCorrectly() { PartTree tree = new PartTree("findByAgeLessThanEqual", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, 18), context); @@ -153,7 +153,7 @@ public void createsLessThanEqualQueryCorrectly() { } @Test - public void createsGreaterThanEqualQueryCorrectly() { + void createsGreaterThanEqualQueryCorrectly() { PartTree tree = new PartTree("findByAgeGreaterThanEqual", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, 18), context); @@ -163,7 +163,7 @@ public void createsGreaterThanEqualQueryCorrectly() { } @Test // DATAMONGO-338 - public void createsExistsClauseCorrectly() { + void createsExistsClauseCorrectly() { PartTree tree = new PartTree("findByAgeExists", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, true), context); @@ -172,7 +172,7 @@ public void createsExistsClauseCorrectly() { } @Test // DATAMONGO-338 - public void createsRegexClauseCorrectly() { + void createsRegexClauseCorrectly() { PartTree tree = new 
PartTree("findByFirstNameRegex", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, ".*"), context); @@ -181,7 +181,7 @@ public void createsRegexClauseCorrectly() { } @Test // DATAMONGO-338 - public void createsTrueClauseCorrectly() { + void createsTrueClauseCorrectly() { PartTree tree = new PartTree("findByActiveTrue", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter), context); @@ -190,7 +190,7 @@ public void createsTrueClauseCorrectly() { } @Test // DATAMONGO-338 - public void createsFalseClauseCorrectly() { + void createsFalseClauseCorrectly() { PartTree tree = new PartTree("findByActiveFalse", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter), context); @@ -199,7 +199,7 @@ public void createsFalseClauseCorrectly() { } @Test // DATAMONGO-413 - public void createsOrQueryCorrectly() { + void createsOrQueryCorrectly() { PartTree tree = new PartTree("findByFirstNameOrAge", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Dave", 42), context); @@ -209,7 +209,7 @@ public void createsOrQueryCorrectly() { } @Test // DATAMONGO-347 - public void createsQueryReferencingADBRefCorrectly() { + void createsQueryReferencingADBRefCorrectly() { User user = new User(); user.id = new ObjectId(); @@ -222,7 +222,7 @@ public void createsQueryReferencingADBRefCorrectly() { } @Test // DATAMONGO-418 - public void createsQueryWithStartingWithPredicateCorrectly() { + void createsQueryWithStartingWithPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameStartingWith", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "Matt"), context); @@ -232,7 +232,7 @@ public void createsQueryWithStartingWithPredicateCorrectly() { } @Test // DATAMONGO-418 - public void createsQueryWithEndingWithPredicateCorrectly() { + void createsQueryWithEndingWithPredicateCorrectly() { 
PartTree tree = new PartTree("findByUsernameEndingWith", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "ews"), context); @@ -242,7 +242,7 @@ public void createsQueryWithEndingWithPredicateCorrectly() { } @Test // DATAMONGO-418 - public void createsQueryWithContainingPredicateCorrectly() { + void createsQueryWithContainingPredicateCorrectly() { PartTree tree = new PartTree("findByUsernameContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "thew"), context); @@ -268,7 +268,7 @@ private void assertBindsDistanceToQuery(Point point, Distance distance, Query re } @Test // DATAMONGO-770 - public void createsQueryWithFindByIgnoreCaseCorrectly() { + void createsQueryWithFindByIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByfirstNameIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); @@ -278,7 +278,7 @@ public void createsQueryWithFindByIgnoreCaseCorrectly() { } @Test // DATAMONGO-770 - public void createsQueryWithFindByNotIgnoreCaseCorrectly() { + void createsQueryWithFindByNotIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameNotIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); @@ -288,7 +288,7 @@ public void createsQueryWithFindByNotIgnoreCaseCorrectly() { } @Test // DATAMONGO-770 - public void createsQueryWithFindByStartingWithIgnoreCaseCorrectly() { + void createsQueryWithFindByStartingWithIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameStartingWithIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); @@ -298,7 +298,7 @@ public void createsQueryWithFindByStartingWithIgnoreCaseCorrectly() { } @Test // DATAMONGO-770 - public void createsQueryWithFindByEndingWithIgnoreCaseCorrectly() { + void 
createsQueryWithFindByEndingWithIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameEndingWithIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); @@ -308,7 +308,7 @@ public void createsQueryWithFindByEndingWithIgnoreCaseCorrectly() { } @Test // DATAMONGO-770 - public void createsQueryWithFindByContainingIgnoreCaseCorrectly() { + void createsQueryWithFindByContainingIgnoreCaseCorrectly() { PartTree tree = new PartTree("findByFirstNameContainingIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); @@ -318,7 +318,7 @@ public void createsQueryWithFindByContainingIgnoreCaseCorrectly() { } @Test // DATAMONGO-770 - public void shouldThrowExceptionForQueryWithFindByIgnoreCaseOnNonStringProperty() { + void shouldThrowExceptionForQueryWithFindByIgnoreCaseOnNonStringProperty() { PartTree tree = new PartTree("findByFirstNameAndAgeIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "foo", 42), context); @@ -328,7 +328,7 @@ public void shouldThrowExceptionForQueryWithFindByIgnoreCaseOnNonStringProperty( } @Test // DATAMONGO-770 - public void shouldOnlyGenerateLikeExpressionsForStringPropertiesIfAllIgnoreCase() { + void shouldOnlyGenerateLikeExpressionsForStringPropertiesIfAllIgnoreCase() { PartTree tree = new PartTree("findByFirstNameAndAgeAllIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); @@ -338,7 +338,7 @@ public void shouldOnlyGenerateLikeExpressionsForStringPropertiesIfAllIgnoreCase( } @Test // DATAMONGO-566 - public void shouldCreateDeleteByQueryCorrectly() { + void shouldCreateDeleteByQueryCorrectly() { PartTree tree = new PartTree("deleteByFirstName", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); @@ -350,7 
+350,7 @@ public void shouldCreateDeleteByQueryCorrectly() { } @Test // DATAMONGO-566 - public void shouldCreateDeleteByQueryCorrectlyForMultipleCriteriaAndCaseExpressions() { + void shouldCreateDeleteByQueryCorrectlyForMultipleCriteriaAndCaseExpressions() { PartTree tree = new PartTree("deleteByFirstNameAndAgeAllIgnoreCase", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave", 42), context); @@ -362,7 +362,7 @@ public void shouldCreateDeleteByQueryCorrectlyForMultipleCriteriaAndCaseExpressi } @Test // DATAMONGO-1075 - public void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() { + void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() { PartTree tree = new PartTree("findByEmailAddressesContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); @@ -373,7 +373,7 @@ public void shouldCreateInClauseWhenUsingContainsOnCollectionLikeProperty() { } @Test // DATAMONGO-1075 - public void shouldCreateInClauseWhenUsingNotContainsOnCollectionLikeProperty() { + void shouldCreateInClauseWhenUsingNotContainsOnCollectionLikeProperty() { PartTree tree = new PartTree("findByEmailAddressesNotContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "dave"), context); @@ -384,7 +384,7 @@ public void shouldCreateInClauseWhenUsingNotContainsOnCollectionLikeProperty() { } @Test // DATAMONGO-1075, DATAMONGO-1425 - public void shouldCreateRegexWhenUsingNotContainsOnStringProperty() { + void shouldCreateRegexWhenUsingNotContainsOnStringProperty() { PartTree tree = new PartTree("findByUsernameNotContaining", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "thew"), context); @@ -395,7 +395,7 @@ public void shouldCreateRegexWhenUsingNotContainsOnStringProperty() { } @Test // DATAMONGO-1139 - public void createsNonSphericalNearForDistanceWithDefaultMetric() { + 
void createsNonSphericalNearForDistanceWithDefaultMetric() { Point point = new Point(1.0, 1.0); Distance distance = new Distance(1.0); @@ -408,7 +408,7 @@ public void createsNonSphericalNearForDistanceWithDefaultMetric() { } @Test // DATAMONGO-1136 - public void shouldCreateWithinQueryCorrectly() { + void shouldCreateWithinQueryCorrectly() { Point first = new Point(1, 1); Point second = new Point(2, 2); @@ -423,7 +423,7 @@ public void shouldCreateWithinQueryCorrectly() { } @Test // DATAMONGO-1110 - public void shouldCreateNearSphereQueryForSphericalProperty() { + void shouldCreateNearSphereQueryForSphericalProperty() { Point point = new Point(10, 20); @@ -435,7 +435,7 @@ public void shouldCreateNearSphereQueryForSphericalProperty() { } @Test // DATAMONGO-1110 - public void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDefaultMetric() { + void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDefaultMetric() { Point point = new Point(1.0, 1.0); Distance distance = new Distance(1.0); @@ -448,7 +448,7 @@ public void shouldCreateNearSphereQueryForSphericalPropertyHavingDistanceWithDef } @Test // DATAMONGO-1110 - public void shouldCreateNearQueryForMinMaxDistance() { + void shouldCreateNearQueryForMinMaxDistance() { Point point = new Point(10, 20); Range range = Distance.between(new Distance(10), new Distance(20)); @@ -461,7 +461,7 @@ public void shouldCreateNearQueryForMinMaxDistance() { } @Test // DATAMONGO-1229 - public void appliesIgnoreCaseToLeafProperty() { + void appliesIgnoreCaseToLeafProperty() { PartTree tree = new PartTree("findByAddressStreetIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "Street"); @@ -470,7 +470,7 @@ public void appliesIgnoreCaseToLeafProperty() { } @Test // DATAMONGO-1232 - public void ignoreCaseShouldEscapeSource() { + void ignoreCaseShouldEscapeSource() { PartTree tree = new PartTree("findByUsernameIgnoreCase", User.class); ConvertingParameterAccessor accessor = 
getAccessor(converter, "con.flux+"); @@ -481,7 +481,7 @@ public void ignoreCaseShouldEscapeSource() { } @Test // DATAMONGO-1232 - public void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() { + void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() { PartTree tree = new PartTree("findByUsernameStartingWithIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "dawns.light+"); @@ -492,7 +492,7 @@ public void ignoreCaseShouldEscapeSourceWhenUsedForStartingWith() { } @Test // DATAMONGO-1232 - public void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() { + void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() { PartTree tree = new PartTree("findByUsernameEndingWithIgnoreCase", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "new.ton+"); @@ -503,7 +503,7 @@ public void ignoreCaseShouldEscapeSourceWhenUsedForEndingWith() { } @Test // DATAMONGO-1232 - public void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { + void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*"); @@ -514,7 +514,7 @@ public void likeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { } @Test // DATAMONGO-1232 - public void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() { + void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+"); @@ -525,7 +525,7 @@ public void likeShouldEscapeSourceWhenUsedWithLeadingWildcard() { } @Test // DATAMONGO-1232 - public void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() { + void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, 
"cala.mity+*"); @@ -535,7 +535,7 @@ public void likeShouldEscapeSourceWhenUsedWithTrailingWildcard() { } @Test // DATAMONGO-1232 - public void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { + void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { PartTree tree = new PartTree("findByUsernameLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*"); @@ -545,7 +545,7 @@ public void likeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { } @Test // DATAMONGO-1342 - public void bindsNullValueToContainsClause() { + void bindsNullValueToContainsClause() { PartTree partTree = new PartTree("emailAddressesContains", User.class); @@ -556,7 +556,7 @@ public void bindsNullValueToContainsClause() { } @Test // DATAMONGO-1424 - public void notLikeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { + void notLikeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*fire.fight+*"); @@ -568,7 +568,7 @@ public void notLikeShouldEscapeSourceWhenUsedWithLeadingAndTrailingWildcard() { } @Test // DATAMONGO-1424 - public void notLikeShouldEscapeSourceWhenUsedWithLeadingWildcard() { + void notLikeShouldEscapeSourceWhenUsedWithLeadingWildcard() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*steel.heart+"); @@ -580,7 +580,7 @@ public void notLikeShouldEscapeSourceWhenUsedWithLeadingWildcard() { } @Test // DATAMONGO-1424 - public void notLikeShouldEscapeSourceWhenUsedWithTrailingWildcard() { + void notLikeShouldEscapeSourceWhenUsedWithTrailingWildcard() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, "cala.mity+*"), context); @@ -591,7 +591,7 @@ public void notLikeShouldEscapeSourceWhenUsedWithTrailingWildcard() { } 
@Test // DATAMONGO-1424 - public void notLikeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { + void notLikeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { PartTree tree = new PartTree("findByUsernameNotLike", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, "*"); @@ -602,7 +602,7 @@ public void notLikeShouldBeTreatedCorrectlyWhenUsedWithWildcardOnly() { } @Test // DATAMONGO-1588 - public void queryShouldAcceptSubclassOfDeclaredArgument() { + void queryShouldAcceptSubclassOfDeclaredArgument() { PartTree tree = new PartTree("findByLocationNear", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, new GeoJsonPoint(-74.044502D, 40.689247D)); @@ -612,7 +612,7 @@ public void queryShouldAcceptSubclassOfDeclaredArgument() { } @Test // DATAMONGO-1588 - public void queryShouldThrowExceptionWhenArgumentDoesNotMatchDeclaration() { + void queryShouldThrowExceptionWhenArgumentDoesNotMatchDeclaration() { PartTree tree = new PartTree("findByLocationNear", User.class); ConvertingParameterAccessor accessor = getAccessor(converter, @@ -623,7 +623,7 @@ public void queryShouldThrowExceptionWhenArgumentDoesNotMatchDeclaration() { } @Test // DATAMONGO-2003 - public void createsRegexQueryForPatternCorrectly() { + void createsRegexQueryForPatternCorrectly() { PartTree tree = new PartTree("findByFirstNameRegex", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, getAccessor(converter, Pattern.compile(".*")), context); @@ -632,7 +632,7 @@ public void createsRegexQueryForPatternCorrectly() { } @Test // DATAMONGO-2003 - public void createsRegexQueryForPatternWithOptionsCorrectly() { + void createsRegexQueryForPatternWithOptionsCorrectly() { Pattern pattern = Pattern.compile(".*", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE); @@ -642,7 +642,7 @@ public void createsRegexQueryForPatternWithOptionsCorrectly() { } @Test // DATAMONGO-2071 - public void betweenShouldAllowSingleRageParameter() { + void 
betweenShouldAllowSingleRageParameter() { PartTree tree = new PartTree("findByAgeBetween", Person.class); MongoQueryCreator creator = new MongoQueryCreator(tree, @@ -652,7 +652,7 @@ public void betweenShouldAllowSingleRageParameter() { } @Test // DATAMONGO-2394 - public void nearShouldUseMetricDistanceForGeoJsonTypes() { + void nearShouldUseMetricDistanceForGeoJsonTypes() { GeoJsonPoint point = new GeoJsonPoint(27.987901, 86.9165379); PartTree tree = new PartTree("findByLocationNear", User.class); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java index fe2f447d78..c6c1b140cd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java @@ -38,6 +38,7 @@ import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.mongodb.repository.MongoRepository; @@ -59,33 +60,32 @@ * @author Mark Paluch */ @ExtendWith(MockitoExtension.class) -public class PartTreeMongoQueryUnitTests { +class PartTreeMongoQueryUnitTests { @Mock MongoOperations mongoOperationsMock; @Mock ExecutableFind findOperationMock; - MongoMappingContext mappingContext; + private MongoMappingContext mappingContext; @BeforeEach - public void setUp() { + void setUp() { mappingContext = new MongoMappingContext(); - DbRefResolver 
dbRefResolver = new DefaultDbRefResolver(mock(MongoDatabaseFactory.class)); - MongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); doReturn(converter).when(mongoOperationsMock).getConverter(); doReturn(findOperationMock).when(mongoOperationsMock).query(any()); } @Test // DATAMOGO-952 - public void rejectsInvalidFieldSpecification() { + void rejectsInvalidFieldSpecification() { assertThatIllegalStateException().isThrownBy(() -> deriveQueryFromMethod("findByLastname", "foo")) .withMessageContaining("findByLastname"); } @Test // DATAMOGO-952 - public void singleFieldJsonIncludeRestrictionShouldBeConsidered() { + void singleFieldJsonIncludeRestrictionShouldBeConsidered() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstname", "foo"); @@ -93,7 +93,7 @@ public void singleFieldJsonIncludeRestrictionShouldBeConsidered() { } @Test // DATAMOGO-952 - public void multiFieldJsonIncludeRestrictionShouldBeConsidered() { + void multiFieldJsonIncludeRestrictionShouldBeConsidered() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findByFirstnameAndLastname", "foo", "bar"); @@ -102,7 +102,7 @@ public void multiFieldJsonIncludeRestrictionShouldBeConsidered() { } @Test // DATAMOGO-952 - public void multiFieldJsonExcludeRestrictionShouldBeConsidered() { + void multiFieldJsonExcludeRestrictionShouldBeConsidered() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstnameAndLastname", "foo", "bar"); @@ -111,7 +111,7 @@ public void multiFieldJsonExcludeRestrictionShouldBeConsidered() { } @Test // DATAMOGO-973 - public void shouldAddFullTextParamCorrectlyToDerivedQuery() { + void shouldAddFullTextParamCorrectlyToDerivedQuery() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findPersonByFirstname", 
"text", TextCriteria.forDefaultLanguage().matching("search")); @@ -120,19 +120,19 @@ public void shouldAddFullTextParamCorrectlyToDerivedQuery() { } @Test // DATAMONGO-1180 - public void propagatesRootExceptionForInvalidQuery() { + void propagatesRootExceptionForInvalidQuery() { assertThatExceptionOfType(IllegalStateException.class).isThrownBy(() -> deriveQueryFromMethod("findByAge", 1)) .withCauseInstanceOf(JsonParseException.class); } @Test // DATAMONGO-1345, DATAMONGO-1735 - public void doesNotDeriveFieldSpecForNormalDomainType() { + void doesNotDeriveFieldSpecForNormalDomainType() { assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEqualTo(new Document()); } @Test // DATAMONGO-1345 - public void restrictsQueryToFieldsRequiredForProjection() { + void restrictsQueryToFieldsRequiredForProjection() { Document fieldsObject = deriveQueryFromMethod("findPersonProjectedBy", new Object[0]).getFieldsObject(); @@ -141,7 +141,7 @@ public void restrictsQueryToFieldsRequiredForProjection() { } @Test // DATAMONGO-1345 - public void restrictsQueryToFieldsRequiredForDto() { + void restrictsQueryToFieldsRequiredForDto() { Document fieldsObject = deriveQueryFromMethod("findPersonDtoByAge", new Object[] { 42 }).getFieldsObject(); @@ -150,7 +150,7 @@ public void restrictsQueryToFieldsRequiredForDto() { } @Test // DATAMONGO-1345 - public void usesDynamicProjection() { + void usesDynamicProjection() { Document fields = deriveQueryFromMethod("findDynamicallyProjectedBy", ExtendedProjection.class).getFieldsObject(); @@ -160,7 +160,7 @@ public void usesDynamicProjection() { } @Test // DATAMONGO-1500 - public void shouldLeaveParameterConversionToQueryMapper() { + void shouldLeaveParameterConversionToQueryMapper() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findBySex", Sex.FEMALE); @@ -169,7 +169,7 @@ public void shouldLeaveParameterConversionToQueryMapper() { } @Test // DATAMONGO-1729, DATAMONGO-1735 - public void 
doesNotCreateFieldsObjectForOpenProjection() { + void doesNotCreateFieldsObjectForOpenProjection() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findAllBy"); @@ -177,12 +177,12 @@ public void doesNotCreateFieldsObjectForOpenProjection() { } @Test // DATAMONGO-1865 - public void limitingReturnsTrueIfTreeIsLimiting() { + void limitingReturnsTrueIfTreeIsLimiting() { assertThat(createQueryForMethod("findFirstBy").isLimiting()).isTrue(); } @Test // DATAMONGO-1865 - public void limitingReturnsFalseIfTreeIsNotLimiting() { + void limitingReturnsFalseIfTreeIsNotLimiting() { assertThat(createQueryForMethod("findPersonBy").isLimiting()).isFalse(); } diff --git a/spring-data-mongodb/src/test/resources/logback.xml b/spring-data-mongodb/src/test/resources/logback.xml index f154590864..a36841c97c 100644 --- a/spring-data-mongodb/src/test/resources/logback.xml +++ b/spring-data-mongodb/src/test/resources/logback.xml @@ -13,7 +13,6 @@ - diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index 842dd8341b..eac49f37bc 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -4,7 +4,7 @@ [[new-features.3.3]] == What's New in Spring Data MongoDB 3.3 -* Extended support for <> entities. +* Extended support for <> entities. [[new-features.3.2]] == What's New in Spring Data MongoDB 3.2 diff --git a/src/main/asciidoc/reference/document-references.adoc b/src/main/asciidoc/reference/document-references.adoc new file mode 100644 index 0000000000..92badd2fa1 --- /dev/null +++ b/src/main/asciidoc/reference/document-references.adoc @@ -0,0 +1,440 @@ +[[mapping-usage-references]] +=== Using DBRefs + +The mapping framework does not have to store child objects embedded within the document. +You can also store them separately and use a `DBRef` to refer to that document. 
+When the object is loaded from MongoDB, those references are eagerly resolved so that you get back a mapped object that looks the same as if it had been stored embedded within your top-level document. + +The following example uses a DBRef to refer to a specific document that exists independently of the object in which it is referenced (both classes are shown in-line for brevity's sake): + +==== +[source,java] +---- +@Document +public class Account { + + @Id + private ObjectId id; + private Float total; +} + +@Document +public class Person { + + @Id + private ObjectId id; + @Indexed + private Integer ssn; + @DBRef + private List accounts; +} +---- +==== + +You need not use `@OneToMany` or similar mechanisms because the List of objects tells the mapping framework that you want a one-to-many relationship. +When the object is stored in MongoDB, there is a list of DBRefs rather than the `Account` objects themselves. +When it comes to loading collections of ``DBRef``s it is advisable to restrict references held in collection types to a specific MongoDB collection. +This allows bulk loading of all references, whereas references pointing to different MongoDB collections need to be resolved one by one. + +IMPORTANT: The mapping framework does not handle cascading saves. +If you change an `Account` object that is referenced by a `Person` object, you must save the `Account` object separately. +Calling `save` on the `Person` object does not automatically save the `Account` objects in the `accounts` property. + +``DBRef``s can also be resolved lazily. +In this case the actual `Object` or `Collection` of references is resolved on first access of the property. +Use the `lazy` attribute of `@DBRef` to specify this. +Required properties that are also defined as lazy loading ``DBRef`` and used as constructor arguments are also decorated with the lazy loading proxy making sure to put as little pressure on the database and network as possible. 
+ +TIP: Lazily loaded ``DBRef``s can be hard to debug. +Make sure tooling does not accidentally trigger proxy resolution by e.g. calling `toString()` or some inline debug rendering invoking property getters. +Please consider enabling _trace_ logging for `org.springframework.data.mongodb.core.convert.DefaultDbRefResolver` to gain insight on `DBRef` resolution. + +[[mapping-usage.document-references]] +=== Using Document References + +Using `@DocumentReference` offers a flexible way of referencing entities in MongoDB. +While the goal is the same as when using <>, the store representation is different. +`DBRef` resolves to a document with a fixed structure as outlined in the https://docs.mongodb.com/manual/reference/database-references/[MongoDB Reference documentation]. + +Document references do not follow a specific format. +They can be literally anything, a single value, an entire document, basically everything that can be stored in MongoDB. +By default, the mapping layer will use the referenced entity's _id_ value for storage and retrieval, like in the sample below. + +==== +[source,java] +---- +@Document +class Account { + + @Id + String id; + Float total; +} + +@Document +class Person { + + @Id + String id; + + @DocumentReference <1> + List accounts; +} +---- + +[source,java] +---- +Account account = … + +template.insert(account); <2> + +template.update(Person.class) + .matching(where("id").is(…)) + .apply(new Update().push("accounts").value(account)) <3> + .first(); +---- + +[source,json] +---- +{ + "_id" : …, + "accounts" : [ "6509b9e" … ] <4> +} +---- +<1> Mark the collection of `Account` values to be referenced. +<2> The mapping framework does not handle cascading saves, so make sure to persist the referenced entity individually. +<3> Add the reference to the existing entity. +<4> Referenced `Account` entities are represented as an array of their `_id` values. 
+==== + +The sample above uses an ``_id``-based fetch query (`{ '_id' : ?#{#target} }`) for data retrieval and resolves linked entities eagerly. +It is possible to alter resolution defaults (listed below) using the attributes of `@DocumentReference` + +.@DocumentReference defaults +[cols="2,3,5",options="header"] +|=== +| Attribute | Description | Default + +| `db` +| The target database name for collection lookup. +| `MongoDatabaseFactory.getMongoDatabase()` + +| `collection` +| The target collection name. +| The annotated property's domain type, respectively the value type in case of `Collection` like or `Map` properties, collection name. + +| `lookup` +| The single document lookup query evaluating placeholders via SpEL expressions using `#target` as the marker for a given source value. `Collection` like or `Map` properties combine individual lookups via an `$or` operator. +| An `_id` field based query (`{ '_id' : ?#{#target} }`) using the loaded source value. + +| `sort` +| Used for sorting result documents on server side. +| None by default. +Result order of `Collection` like properties is restored based on the used lookup query on a best-effort basis. + +| `lazy` +| If set to `true` value resolution is delayed upon first access of the property. +| Resolves properties eagerly by default. +|=== + +`@DocumentReference(lookup)` allows defining filter queries that can be different from the `_id` field and therefore offer a flexible way of defining references between entities as demonstrated in the sample below, where the `Publisher` of a book is referenced by its acronym instead of the internal `id`. 
+ +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + @Field("publisher_ac") + @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") <1> + Publisher publisher; +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; <1> + String name; + + @DocumentReference(lazy = true) <2> + List books; + +} +---- + +.`Book` document +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisher_ac" : "DR" +} +---- + +.`Publisher` document +[source,json] +---- +{ + "_id" : 1a23e45, + "acronym" : "DR", + "name" : "Del Rey", + … +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> Lazy load back references to the `Book` collection. +==== + +The above snippet shows the reading side of things when working with custom referenced objects. +Writing requires a bit of additional setup as the mapping information does not express where `#target` stems from. +The mapping layer requires registration of a `Converter` between the target document and `DocumentPointer`, like the one below: + +==== +[source,java] +---- +@WritingConverter +class PublisherReferenceConverter implements Converter> { + + @Override + public DocumentPointer convert(Publisher source) { + return () -> source.getAcronym(); + } +} +---- +==== + +If no `DocumentPointer` converter is provided, the target reference document can be computed based on the given lookup query. +In this case the association target properties are evaluated as shown in the following sample. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + @DocumentReference(lookup = "{ 'acronym' : ?#{acc} }") <1> <2> + Publisher publisher; +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; <1> + String name; + + // ... 
+} +---- + +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisher" : { + "acc" : "DOC" + } +} +---- +<1> Use the `acronym` field to query for entities in the `Publisher` collection. +<2> The field value placeholders of the lookup query (like `acc`) are used to form the reference document. +==== + +With all the above in place it is possible to model all kinds of associations between entities. +Have a look at the non-exhaustive list of samples below to get a feeling for what is possible. + +.Simple Document Reference using _id_ field +==== +[source,java] +---- +class Entity { + @DocumentReference + ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : "9a48e32" <1> +} + +// referenced object +{ + "_id" : "9a48e32" <1> +} +---- +<1> MongoDB simple type can be directly used without further configuration. +==== + +.Simple Document Reference using _id_ field with explicit lookup query +==== +[source,java] +---- +class Entity { + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") <1> + ReferencedObject ref; +} +---- + +[source,json] +---- +// entity +{ + "_id" : "8cfb002", + "ref" : "9a48e32" <1> +} + +// referenced object +{ + "_id" : "9a48e32" +} +---- +<1> _target_ defines the reference value itself. 
+====
+
+.Document Reference extracting the `refKey` field for the lookup query
+====
+[source,java]
+----
+class Entity {
+ @DocumentReference(lookup = "{ '_id' : '?#{refKey}' }") <1> <2>
+ private ReferencedObject ref;
+}
+----
+
+[source,java]
+----
+@WritingConverter
+class ToDocumentPointerConverter implements Converter> {
+ public DocumentPointer convert(ReferencedObject source) {
+ return () -> new Document("refKey", source.id); <1>
+ }
+}
+----
+
+[source,json]
+----
+// entity
+{
+ "_id" : "8cfb002",
+ "ref" : {
+ "refKey" : "9a48e32" <1>
+ }
+}
+
+// referenced object
+{
+ "_id" : "9a48e32"
+}
+----
+<1> The key used for obtaining the reference value must be the one used during write.
+<2> `refKey` is short for `target.refKey`.
+====
+
+.Document Reference with multiple values forming the lookup query
+====
+[source,java]
+----
+class Entity {
+ @DocumentReference(lookup = "{ 'firstname' : '?#{fn}', 'lastname' : '?#{ln}' }") <1> <2>
+ ReferencedObject ref;
+}
+----
+
+[source,json]
+----
+// entity
+{
+ "_id" : "8cfb002",
+ "ref" : {
+ "fn" : "Josh", <1>
+ "ln" : "Long" <1>
+ }
+}
+
+// referenced object
+{
+ "_id" : "9a48e32",
+ "firstname" : "Josh", <2>
+ "lastname" : "Long", <2>
+}
+----
+<1> Read/write the keys `fn` & `ln` from/to the linkage document based on the lookup query.
+<2> Use non _id_ fields for the lookup of the target documents.
+====
+
+.Document Reference reading from a target collection
+====
+[source,java]
+----
+class Entity {
+ @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") <2>
+ private ReferencedObject ref;
+}
+----
+
+[source,java]
+----
+@WritingConverter
+class ToDocumentPointerConverter implements Converter> {
+ public DocumentPointer convert(ReferencedObject source) {
+ return () -> new Document("id", source.id) <1>
+ .append("collection", … ); <2>
+ }
+}
+----
+
+[source,json]
+----
+// entity
+{
+ "_id" : "8cfb002",
+ "ref" : {
+ "id" : "9a48e32", <1>
+ "collection" : "…" <2>
+ }
+}
+----
+<1> Read/write the keys `_id` from/to the reference document to use them in the lookup query.
+<2> The collection name can be read from the reference document using its key.
+====
+
+[WARNING]
+====
+We know it is tempting to use all kinds of MongoDB query operators in the lookup query and this is fine.
+But there are a few aspects to consider:
+
+* Make sure to have indexes in place that support your lookup.
+* Mind that resolution requires a server roundtrip inducing latency, consider a lazy strategy.
+* A collection of document references is bulk loaded using the `$or` operator.
+
+The original element order is restored in memory on a best-effort basis.
+Restoring the order is only possible when using equality expressions and cannot be done when using MongoDB query operators.
+In this case results will be ordered as they are received from the store or via the provided `@DocumentReference(sort)` attribute.
+
+A few more general remarks:
+
+* Do you use cyclic references?
+Ask yourself if you need them.
+* Lazy document references are hard to debug.
+Make sure tooling does not accidentally trigger proxy resolution by e.g. calling `toString()`.
+* There is no support for reading document references using reactive infrastructure.
+==== diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index 9f9a461ee6..f08d03d3f0 100644 --- a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -2,7 +2,10 @@ [[mapping-chapter]] = Mapping -Rich mapping support is provided by the `MappingMongoConverter`. `MappingMongoConverter` has a rich metadata model that provides a full feature set to map domain objects to MongoDB documents. The mapping metadata model is populated by using annotations on your domain objects. However, the infrastructure is not limited to using annotations as the only source of metadata information. The `MappingMongoConverter` also lets you map objects to documents without providing any additional metadata, by following a set of conventions. +Rich mapping support is provided by the `MappingMongoConverter`. `MappingMongoConverter` has a rich metadata model that provides a full feature set to map domain objects to MongoDB documents. +The mapping metadata model is populated by using annotations on your domain objects. +However, the infrastructure is not limited to using annotations as the only source of metadata information. +The `MappingMongoConverter` also lets you map objects to documents without providing any additional metadata, by following a set of conventions. This section describes the features of the `MappingMongoConverter`, including fundamentals, how to use conventions for mapping objects to documents and how to override those conventions with annotation-based mapping metadata. @@ -357,7 +360,10 @@ The `base-package` property tells it where to scan for classes annotated with th [[mapping-usage]] == Metadata-based Mapping -To take full advantage of the object mapping functionality inside the Spring Data MongoDB support, you should annotate your mapped objects with the `@Document` annotation. 
Although it is not necessary for the mapping framework to have this annotation (your POJOs are mapped correctly, even without any annotations), it lets the classpath scanner find and pre-process your domain objects to extract the necessary metadata. If you do not use this annotation, your application takes a slight performance hit the first time you store a domain object, because the mapping framework needs to build up its internal metadata model so that it knows about the properties of your domain object and how to persist them. The following example shows a domain object: +To take full advantage of the object mapping functionality inside the Spring Data MongoDB support, you should annotate your mapped objects with the `@Document` annotation. +Although it is not necessary for the mapping framework to have this annotation (your POJOs are mapped correctly, even without any annotations), it lets the classpath scanner find and pre-process your domain objects to extract the necessary metadata. +If you do not use this annotation, your application takes a slight performance hit the first time you store a domain object, because the mapping framework needs to build up its internal metadata model so that it knows about the properties of your domain object and how to persist them. +The following example shows a domain object: .Example domain object ==== @@ -759,7 +765,12 @@ mongoOperations.indexOpsFor(Jedi.class) NOTE: The text index feature is disabled by default for MongoDB v.2.4. -Creating a text index allows accumulating several fields into a searchable full-text index. It is only possible to have one text index per collection, so all fields marked with `@TextIndexed` are combined into this index. Properties can be weighted to influence the document score for ranking results. The default language for the text index is English. To change the default language, set the `language` attribute to whichever language you want (for example,`@Document(language="spanish")`). 
Using a property called `language` or `@Language` lets you define a language override on a per document base. The following example shows how to created a text index and set the language to Spanish: +Creating a text index allows accumulating several fields into a searchable full-text index. +It is only possible to have one text index per collection, so all fields marked with `@TextIndexed` are combined into this index. +Properties can be weighted to influence the document score for ranking results. +The default language for the text index is English.To change the default language, set the `language` attribute to whichever language you want (for example,`@Document(language="spanish")`). +Using a property called `language` or `@Language` lets you define a language override on a per-document base. +The following example shows how to created a text index and set the language to Spanish: .Example Text Index Usage ==== @@ -783,417 +794,7 @@ class Nested { ---- ==== -[[mapping-usage-references]] -=== Using DBRefs - -The mapping framework does not have to store child objects embedded within the document. -You can also store them separately and use a DBRef to refer to that document. -When the object is loaded from MongoDB, those references are eagerly resolved so that you get back a mapped object that looks the same as if it had been stored embedded within your top-level document. 
- -The following example uses a DBRef to refer to a specific document that exists independently of the object in which it is referenced (both classes are shown in-line for brevity's sake): - -==== -[source,java] ----- -@Document -public class Account { - - @Id - private ObjectId id; - private Float total; -} - -@Document -public class Person { - - @Id - private ObjectId id; - @Indexed - private Integer ssn; - @DBRef - private List accounts; -} ----- -==== - -You need not use `@OneToMany` or similar mechanisms because the List of objects tells the mapping framework that you want a one-to-many relationship. When the object is stored in MongoDB, there is a list of DBRefs rather than the `Account` objects themselves. -When it comes to loading collections of ``DBRef``s it is advisable to restrict references held in collection types to a specific MongoDB collection. This allows bulk loading of all references, whereas references pointing to different MongoDB collections need to be resolved one by one. - -IMPORTANT: The mapping framework does not handle cascading saves. If you change an `Account` object that is referenced by a `Person` object, you must save the `Account` object separately. Calling `save` on the `Person` object does not automatically save the `Account` objects in the `accounts` property. - -``DBRef``s can also be resolved lazily. In this case the actual `Object` or `Collection` of references is resolved on first access of the property. Use the `lazy` attribute of `@DBRef` to specify this. -Required properties that are also defined as lazy loading ``DBRef`` and used as constructor arguments are also decorated with the lazy loading proxy making sure to put as little pressure on the database and network as possible. - -TIP: Lazily loaded ``DBRef``s can be hard to debug. Make sure tooling does not accidentally trigger proxy resolution by eg. calling `toString()` or some inline debug rendering invoking property getters. 
-Please consider to enable _trace_ logging for `org.springframework.data.mongodb.core.convert.DefaultDbRefResolver` to gain insight on `DBRef` resolution. - -[[mapping-usage.linking]] -=== Using Document References - -Using `@DocumentReference` offers an alternative way of linking entities in MongoDB. -While the goal is the same as when using <>, the store representation is different. -`DBRef` resolves to a document with a fixed structure as outlined in the https://docs.mongodb.com/manual/reference/database-references/[MongoDB Reference documentation]. + -Document references, do not follow a specific format. -They can be literally anything, a single value, an entire document, basically everything that can be stored in MongoDB. -By default, the mapping layer will use the referenced entities _id_ value for storage and retrieval, like in the sample below. - -==== -[source,java] ----- -@Document -public class Account { - - @Id - private String id; - private Float total; -} - -@Document -public class Person { - - @Id - private String id; - - @DocumentReference <1> - private List accounts; -} ----- -[source,java] ----- -Account account = ... - -tempate.insert(account); <2> - -template.update(Person.class) - .matching(where("id").is(...)) - .apply(new Update().push("accounts").value(account)) <3> - .first(); ----- -[source,json] ----- -{ - "_id" : ..., - "accounts" : [ "6509b9e", ... ] <4> -} ----- -<1> Mark the collection of `Account` values to be linked. -<2> The mapping framework does not handle cascading saves, so make sure to persist the referenced entity individually. -<3> Add the reference to the existing entity. -<4> Linked `Account` entities are represented as an array of their `_id` values. -==== - -The sample above uses an `_id` based fetch query (`{ '_id' : ?#{#target} }`) for data retrieval and resolves linked entities eagerly. 
-It is possible to alter resolution defaults (listed below) via the attributes of `@DocumentReference` - -.@DocumentReference defaults -[cols="2,3,5", options="header"] -|=== -| Attribute | Description | Default - -| `db` -| The target database name for collection lookup. -| The configured database provided by `MongoDatabaseFactory.getMongoDatabase()`. - -| `collection` -| The target collection name. -| The annotated properties domain type, respectively the value type in case of `Collection` like or `Map` properties, collection name. - -| `lookup` -| The single document lookup query evaluating placeholders via SpEL expressions using `#target` as the marker for a given source value. `Collection` like or `Map` properties combine individual lookups via an `$or` operator. -| An `_id` field based query (`{ '_id' : ?#{#target} }`) using the loaded source value. - -| `sort` -| Used for sorting result documents on server side. -| None by default. Result order of `Collection` like properties is restored based on the used lookup query. - -| `lazy` -| If set to `true` value resolution is delayed upon first access of the property. -| Resolves properties eagerly by default. -|=== - -`@DocumentReference(lookup=...)` allows to define custom queries that are independent from the `_id` field and therefore offer a flexible way of defining links between entities as demonstrated in the sample below, where the `Publisher` of a book is referenced by its acronym instead of the internal `id`. 
- -==== -[source,java] ----- -@Document -public class Book { - - @Id - private ObjectId id; - private String title; - private List author; - - @Field("publisher_ac") - @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") <1> - private Publisher publisher; -} - -@Document -public class Publisher { - - @Id - private ObjectId id; - private String acronym; <1> - private String name; - - @DocumentReference(lazy = true) <2> - private List books; - -} ----- -[source,json] ----- -{ - "_id" : 9a48e32, - "title" : "The Warded Man", - "author" : ["Peter V. Brett"], - "publisher_ac" : "DR" -} ----- -<1> Use the `acronym` field to query for entities in the `Publisher` collection. -<2> Lazy load back references to the `Book` collection. -==== - -The above snipped shows the reading side of things when working with custom linked objects. -To make the writing part aware of the modified document pointer a custom converter, capable of the transformation into a `DocumentPointer`, like the one below, needs to be registered. - -==== -[source,java] ----- -@WritingConverter -class PublisherReferenceConverter implements Converter> { - - @Override - public DocumentPointer convert(Publisher source) { - return () -> source.getAcronym(); - } -} ----- -==== - -If no `DocumentPointer` converter is provided the target linkage document can be computed based on the given lookup query. -In this case the association target properties are evaluated as shown in the following sample. - -==== -[source,java] ----- -@Document -public class Book { - - @Id - private ObjectId id; - private String title; - private List author; - - @DocumentReference(lookup = "{ 'acronym' : ?#{acc} }") <1> <2> - private Publisher publisher; -} - -@Document -public class Publisher { - - @Id - private ObjectId id; - private String acronym; <1> - private String name; - - // ... -} ----- -[source,json] ----- -{ - "_id" : 9a48e32, - "title" : "The Warded Man", - "author" : ["Peter V. 
Brett"], - "publisher" : { - "acc" : "DOC" - } -} ----- -<1> Use the `acronym` field to query for entities in the `Publisher` collection. -<2> The field value placeholders of the lookup query (like `acc`) is used to form the linkage document. -==== - -With all the above in place it is possible to model all kind of associations between entities. -Have a look at the non exhaustive list of samples below to get feeling for what is possible. - -.Simple Document Reference using _id_ field -==== -[source,java] ----- -class Entity { - @DocumentReference - private ReferencedObject ref; -} ----- - -[source,json] ----- -// entity -{ - "_id" : "8cfb002", - "ref" : "9a48e32" <1> -} - -// referenced object -{ - "_id" : "9a48e32" <1> -} ----- -<1> MongoDB simple type can be directly used without further configuration. -==== - -.Simple Document Reference using _id_ field with explicit lookup query -==== -[source,java] ----- -class Entity { - @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") <1> - private ReferencedObject ref; -} ----- - -[source,json] ----- -// entity -{ - "_id" : "8cfb002", - "ref" : "9a48e32" <1> -} - -// referenced object -{ - "_id" : "9a48e32" -} ----- -<1> _target_ defines the linkage value itself. -==== - -.Document Reference extracting field of linkage document for lookup query -==== -[source,java] ----- -class Entity { - @DocumentReference(lookup = "{ '_id' : '?#{refKey}' }") <1> <2> - private ReferencedObject ref; -} ----- - -[source,java] ----- -@WritingConverter -class ToDocumentPointerConverter implements Converter> { - public DocumentPointer convert(ReferencedObject source) { - return () -> new Document("refKey", source.id); <1> - } -} ----- - -[source,json] ----- -// entity -{ - "_id" : "8cfb002", - "ref" : { - "refKey" : "9a48e32" <1> - } -} - -// referenced object -{ - "_id" : "9a48e32" -} ----- -<1> The key used for obtaining the linkage value must be the one used during write. -<2> `refKey` is short for `target.refKey`. 
-==== - -.Document Reference with multiple values forming the lookup query -==== -[source,java] ----- -class Entity { - @DocumentReference(lookup = "{ 'firstname' : '?#{fn}', 'lastname' : '?#{ln}' }") <1> <2> - private ReferencedObject ref; -} ----- - -[source,json] ----- -// entity -{ - "_id" : "8cfb002", - "ref" : { - "fn" : "Josh", <1> - "ln" : "Long" <1> - } -} - -// referenced object -{ - "_id" : "9a48e32", - "firsntame" : "Josh", <2> - "lastname" : "Long", <2> -} ----- -<1> Read/wirte the keys `fn` & `ln` from/to the linkage document based on the lookup query. -<2> Use non _id_ fields for the lookup of the target documents. -==== - -.Document Reference reading target collection from linkage document -==== -[source,java] ----- -class Entity { - @DocumentReference(lookup = "{ '_id' : '?#{id}' }", collection = "?#{collection}") <2> - private ReferencedObject ref; -} ----- - -[source,java] ----- -@WritingConverter -class ToDocumentPointerConverter implements Converter> { - public DocumentPointer convert(ReferencedObject source) { - return () -> new Document("id", source.id) <1> - .append("collection", ... ); <2> - } -} ----- - -[source,json] ----- -// entity -{ - "_id" : "8cfb002", - "ref" : { - "id" : "9a48e32", <1> - "collection" : "..." <2> - } -} ----- -<1> Read/wirte the keys `_id` from/to the linkage document to use them in the lookup query. -<2> The collection name can be read from the linkage document via its key. -==== - -[WARNING] -==== -We know it is tempting to use all kinds of MongoDB query operators in the lookup query and this is fine. But: - -* Make sure to have indexes in place that support your lookup. -* Mind that resolution takes time and consider a lazy strategy. -* A collection of document references is bulk loaded using an `$or` operator. + -The original element order is restored in memory which cannot be done when using MongoDB query operators. 
-In this case Results will be ordered as they are received from the store or via the provided `@DocumentReference(sort = ...)` attribute. - -And a few more general remarks: - -* Cyclic references? Ask your self if you need them. -* Lazy document references are hard to debug. Make sure tooling does not accidentally trigger proxy resolution by eg. calling `toString()`. -* There is no support for reading document references via the reactive bits Spring Data MongoDB offers. -==== +include::document-references.adoc[] [[mapping-usage-events]] === Mapping Framework Events From dae0ac3b4dc1a3fedffd39571352b0323464d03d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 21 May 2021 10:56:58 +0200 Subject: [PATCH 021/983] Remove duplicate LazyLoadingInterceptor code by reusing LazyLoadingProxyFactory. Original pull request: #3647. Closes #3602. --- .../core/convert/DefaultDbRefResolver.java | 309 +----------------- .../convert/DefaultReferenceResolver.java | 23 +- .../core/convert/LazyLoadingProxy.java | 4 +- .../core/convert/LazyLoadingProxyFactory.java | 138 +++++--- .../core/convert/ReferenceResolver.java | 14 +- .../DbRefMappingMongoConverterUnitTests.java | 78 ++--- .../DefaultDbRefResolverUnitTests.java | 2 + .../LazyLoadingInterceptorUnitTests.java | 8 +- .../core/convert/LazyLoadingTestUtils.java | 8 +- 9 files changed, 173 insertions(+), 411 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java index f64c7f0f06..70fc880cc1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolver.java @@ -15,13 +15,6 @@ */ package org.springframework.data.mongodb.core.convert; -import static 
org.springframework.util.ReflectionUtils.*; - -import java.io.IOException; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.Serializable; -import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -29,30 +22,18 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import org.aopalliance.intercept.MethodInterceptor; -import org.aopalliance.intercept.MethodInvocation; import org.bson.Document; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.aop.framework.ProxyFactory; -import org.springframework.cglib.proxy.Callback; -import org.springframework.cglib.proxy.Enhancer; -import org.springframework.cglib.proxy.Factory; -import org.springframework.cglib.proxy.MethodProxy; -import org.springframework.dao.DataAccessException; + import org.springframework.dao.InvalidDataAccessApiUsageException; -import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.ClientSessionException; -import org.springframework.data.mongodb.LazyLoadingException; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.MongoDatabaseUtils; import org.springframework.data.mongodb.core.convert.ReferenceLoader.DocumentReferenceQuery; import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; -import org.springframework.objenesis.ObjenesisStd; import org.springframework.util.Assert; -import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; import com.mongodb.DBRef; @@ -74,8 +55,6 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db private static final Logger LOGGER = LoggerFactory.getLogger(DefaultDbRefResolver.class); private final 
MongoDatabaseFactory mongoDbFactory; - private final PersistenceExceptionTranslator exceptionTranslator; - private final ObjenesisStd objenesis; /** * Creates a new {@link DefaultDbRefResolver} with the given {@link MongoDatabaseFactory}. @@ -84,13 +63,11 @@ public class DefaultDbRefResolver extends DefaultReferenceResolver implements Db */ public DefaultDbRefResolver(MongoDatabaseFactory mongoDbFactory) { - super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory)); + super(new MongoDatabaseFactoryReferenceLoader(mongoDbFactory), mongoDbFactory.getExceptionTranslator()); Assert.notNull(mongoDbFactory, "MongoDbFactory translator must not be null!"); this.mongoDbFactory = mongoDbFactory; - this.exceptionTranslator = mongoDbFactory.getExceptionTranslator(); - this.objenesis = new ObjenesisStd(true); } /* @@ -180,44 +157,9 @@ public List bulkFetch(List refs) { private Object createLazyLoadingProxy(MongoPersistentProperty property, @Nullable DBRef dbref, DbRefResolverCallback callback, DbRefProxyHandler handler) { - Class propertyType = property.getType(); - LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, dbref, exceptionTranslator, callback); - - if (!propertyType.isInterface()) { - - Factory factory = (Factory) objenesis.newInstance(getEnhancedTypeFor(propertyType)); - factory.setCallbacks(new Callback[] { interceptor }); - - return handler.populateId(property, dbref, factory); - } - - ProxyFactory proxyFactory = new ProxyFactory(); - - for (Class type : propertyType.getInterfaces()) { - proxyFactory.addInterface(type); - } - - proxyFactory.addInterface(LazyLoadingProxy.class); - proxyFactory.addInterface(propertyType); - proxyFactory.addAdvice(interceptor); + Object lazyLoadingProxy = getProxyFactory().createLazyLoadingProxy(property, callback, dbref); - return handler.populateId(property, dbref, proxyFactory.getProxy(LazyLoadingProxy.class.getClassLoader())); - } - - /** - * Returns the CGLib enhanced type for the given source type. 
- * - * @param type - * @return - */ - private Class getEnhancedTypeFor(Class type) { - - Enhancer enhancer = new Enhancer(); - enhancer.setSuperclass(type); - enhancer.setCallbackType(org.springframework.cglib.proxy.MethodInterceptor.class); - enhancer.setInterfaces(new Class[] { LazyLoadingProxy.class }); - - return enhancer.createClass(); + return handler.populateId(property, dbref, lazyLoadingProxy); } /** @@ -244,249 +186,6 @@ private static Stream documentWithId(Object identifier, Collection getReferenceLoader().fetchMany(filter, ctx); private final LookupFunction singleValueLookupFunction = (filter, ctx) -> { @@ -43,13 +47,17 @@ public class DefaultReferenceResolver implements ReferenceResolver { /** * Create a new instance of {@link DefaultReferenceResolver}. - * + * * @param referenceLoader must not be {@literal null}. + * @param exceptionTranslator must not be {@literal null}. */ - public DefaultReferenceResolver(ReferenceLoader referenceLoader) { - + public DefaultReferenceResolver(ReferenceLoader referenceLoader, PersistenceExceptionTranslator exceptionTranslator) { + Assert.notNull(referenceLoader, "ReferenceLoader must not be null!"); + Assert.notNull(exceptionTranslator, "ExceptionTranslator must not be null!"); + this.referenceLoader = referenceLoader; + this.proxyFactory = new LazyLoadingProxyFactory(exceptionTranslator); } @Override @@ -92,9 +100,14 @@ protected ReferenceLoader getReferenceLoader() { return referenceLoader; } + LazyLoadingProxyFactory getProxyFactory() { + return proxyFactory; + } + private Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) { - return new LazyLoadingProxyFactory(referenceLookupDelegate).createLazyLoadingProxy(property, source, lookupFunction, - entityReader); + return proxyFactory.createLazyLoadingProxy(property, it -> { + return referenceLookupDelegate.readReference(it, 
source, lookupFunction, entityReader); + }, source); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java index 8be7111988..a2a2df8c86 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxy.java @@ -15,18 +15,18 @@ */ package org.springframework.data.mongodb.core.convert; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; import org.springframework.lang.Nullable; import com.mongodb.DBRef; /** - * Allows direct interaction with the underlying {@link LazyLoadingInterceptor}. + * Allows direct interaction with the underlying {@code LazyLoadingInterceptor}. * * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch * @since 1.5 + * @see LazyLoadingProxyFactory */ public interface LazyLoadingProxy { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java index 8c2156df2e..f77b96c71f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java @@ -15,46 +15,59 @@ */ package org.springframework.data.mongodb.core.convert; -import static org.springframework.data.mongodb.core.convert.ReferenceLookupDelegate.*; import static org.springframework.util.ReflectionUtils.*; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; import java.io.Serializable; import java.lang.reflect.Method; import 
org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.aop.framework.ProxyFactory; import org.springframework.cglib.proxy.Callback; import org.springframework.cglib.proxy.Enhancer; import org.springframework.cglib.proxy.Factory; import org.springframework.cglib.proxy.MethodProxy; -import org.springframework.data.mongodb.core.convert.ReferenceResolver.MongoEntityReader; +import org.springframework.dao.DataAccessException; +import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.LazyLoadingException; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.lang.Nullable; import org.springframework.objenesis.ObjenesisStd; import org.springframework.util.ReflectionUtils; +import com.mongodb.DBRef; + /** + * {@link ProxyFactory} to create a proxy for {@link MongoPersistentProperty#getType()} to resolve a reference lazily. 
+ * * @author Christoph Strobl + * @author Mark Paluch */ class LazyLoadingProxyFactory { + private static final Logger LOGGER = LoggerFactory.getLogger(LazyLoadingProxyFactory.class); + private final ObjenesisStd objenesis; - private final ReferenceLookupDelegate lookupDelegate; - public LazyLoadingProxyFactory(ReferenceLookupDelegate lookupDelegate) { + private final PersistenceExceptionTranslator exceptionTranslator; - this.lookupDelegate = lookupDelegate; + public LazyLoadingProxyFactory(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; this.objenesis = new ObjenesisStd(true); } - public Object createLazyLoadingProxy(MongoPersistentProperty property, Object source, LookupFunction lookupFunction, - MongoEntityReader entityReader) { + public Object createLazyLoadingProxy(MongoPersistentProperty property, DbRefResolverCallback callback, + Object source) { Class propertyType = property.getType(); - LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, source, lookupDelegate, lookupFunction, - entityReader); + LazyLoadingInterceptor interceptor = new LazyLoadingInterceptor(property, callback, source, exceptionTranslator); if (!propertyType.isInterface()) { @@ -96,17 +109,9 @@ private Class getEnhancedTypeFor(Class type) { public static class LazyLoadingInterceptor implements MethodInterceptor, org.springframework.cglib.proxy.MethodInterceptor, Serializable { - private final ReferenceLookupDelegate referenceLookupDelegate; - private final MongoPersistentProperty property; - private volatile boolean resolved; - private @Nullable Object result; - private final Object source; - private final LookupFunction lookupFunction; - private final MongoEntityReader entityReader; - - private final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD; + private static final Method INITIALIZE_METHOD, TO_DBREF_METHOD, FINALIZE_METHOD, GET_SOURCE_METHOD; - { + static { try { 
INITIALIZE_METHOD = LazyLoadingProxy.class.getMethod("getTarget"); TO_DBREF_METHOD = LazyLoadingProxy.class.getMethod("toDBRef"); @@ -117,14 +122,20 @@ public static class LazyLoadingInterceptor } } - public LazyLoadingInterceptor(MongoPersistentProperty property, Object source, ReferenceLookupDelegate reader, - LookupFunction lookupFunction, MongoEntityReader entityReader) { + private final MongoPersistentProperty property; + private final DbRefResolverCallback callback; + private final Object source; + private final PersistenceExceptionTranslator exceptionTranslator; + private volatile boolean resolved; + private @Nullable Object result; + + public LazyLoadingInterceptor(MongoPersistentProperty property, DbRefResolverCallback callback, Object source, + PersistenceExceptionTranslator exceptionTranslator) { this.property = property; + this.callback = callback; this.source = source; - this.referenceLookupDelegate = reader; - this.lookupFunction = lookupFunction; - this.entityReader = entityReader; + this.exceptionTranslator = exceptionTranslator; } @Nullable @@ -142,7 +153,7 @@ public Object intercept(Object o, Method method, Object[] args, MethodProxy prox } if (TO_DBREF_METHOD.equals(method)) { - return null; + return source instanceof DBRef ? 
source : null; } if (GET_SOURCE_METHOD.equals(method)) { @@ -152,7 +163,7 @@ public Object intercept(Object o, Method method, Object[] args, MethodProxy prox if (isObjectMethod(method) && Object.class.equals(method.getDeclaringClass())) { if (ReflectionUtils.isToStringMethod(method)) { - return proxyToString(proxy); + return proxyToString(source); } if (ReflectionUtils.isEqualsMethod(method)) { @@ -160,7 +171,7 @@ public Object intercept(Object o, Method method, Object[] args, MethodProxy prox } if (ReflectionUtils.isHashCodeMethod(method)) { - return proxyHashCode(proxy); + return proxyHashCode(); } // DATAMONGO-1076 - finalize methods should not trigger proxy initialization @@ -195,7 +206,13 @@ private String proxyToString(@Nullable Object source) { StringBuilder description = new StringBuilder(); if (source != null) { - description.append(source); + if (source instanceof DBRef) { + description.append(((DBRef) source).getCollectionName()); + description.append(":"); + description.append(((DBRef) source).getId()); + } else { + description.append(source); + } } else { description.append(System.identityHashCode(source)); } @@ -217,8 +234,36 @@ private boolean proxyEquals(@Nullable Object proxy, Object that) { return proxyToString(proxy).equals(that.toString()); } - private int proxyHashCode(@Nullable Object proxy) { - return proxyToString(proxy).hashCode(); + private int proxyHashCode() { + return proxyToString(source).hashCode(); + } + + /** + * Callback method for serialization. + * + * @param out + * @throws IOException + */ + private void writeObject(ObjectOutputStream out) throws IOException { + + ensureResolved(); + out.writeObject(this.result); + } + + /** + * Callback method for deserialization. 
+ * + * @param in + * @throws IOException + */ + private void readObject(ObjectInputStream in) throws IOException { + + try { + this.resolved = true; + this.result = in.readObject(); + } catch (ClassNotFoundException e) { + throw new LazyLoadingException("Could not deserialize result", e); + } } @Nullable @@ -226,32 +271,31 @@ private synchronized Object resolve() { if (resolved) { - // if (LOGGER.isTraceEnabled()) { - // LOGGER.trace("Accessing already resolved lazy loading property {}.{}", - // property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()); - // } + if (LOGGER.isTraceEnabled()) { + LOGGER.trace("Accessing already resolved lazy loading property {}.{}", + property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()); + } return result; } try { - // if (LOGGER.isTraceEnabled()) { - // LOGGER.trace("Resolving lazy loading property {}.{}", - // property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()); - // } + if (LOGGER.isTraceEnabled()) { + LOGGER.trace("Resolving lazy loading property {}.{}", + property.getOwner() != null ? property.getOwner().getName() : "unknown", property.getName()); + } - return referenceLookupDelegate.readReference(property, source, lookupFunction, entityReader); + return callback.resolve(property); } catch (RuntimeException ex) { - throw ex; - // DataAccessException translatedException = this.exceptionTranslator.translateExceptionIfPossible(ex); - // - // if (translatedException instanceof ClientSessionException) { - // throw new LazyLoadingException("Unable to lazily resolve DBRef! Invalid session state.", ex); - // } + DataAccessException translatedException = exceptionTranslator.translateExceptionIfPossible(ex); + + if (translatedException instanceof ClientSessionException) { + throw new LazyLoadingException("Unable to lazily resolve DBRef! 
Invalid session state.", ex); + } - // throw new LazyLoadingException("Unable to lazily resolve DBRef!", - // translatedException != null ? translatedException : ex); + throw new LazyLoadingException("Unable to lazily resolve DBRef!", + translatedException != null ? translatedException : ex); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java index 91235b5270..c2cf676604 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceResolver.java @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.convert; +import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.util.TypeInformation; @@ -27,12 +28,15 @@ * The {@link ReferenceResolver} allows to load and convert linked entities. * * @author Christoph Strobl + * @since 3.3 */ +@FunctionalInterface public interface ReferenceResolver { /** - * Resolve the association defined via the given property from a given source value. May deliver a - * {@link LazyLoadingProxy proxy instance} in case of a lazy loading association. + * Resolve the association defined via the given property from a given source value. May return a + * {@link LazyLoadingProxy proxy instance} in case of a lazy loading association. The resolved value is assignable to + * {@link PersistentProperty#getType()}. * * @param property the association defining property. * @param source the association source value. @@ -79,7 +83,7 @@ public static ReferenceCollection fromDBRef(DBRef dbRef) { /** * Get the target collection name. 
- * + * * @return never {@literal null}. */ public String getCollection() { @@ -98,7 +102,7 @@ public String getDatabase() { } /** - * Domain type conversion callback interface that allows to read + * Domain type conversion callback interface that allows to read the {@code source} object into a mapped object. */ @FunctionalInterface interface MongoEntityReader { @@ -107,7 +111,7 @@ interface MongoEntityReader { * Read values from the given source into an object defined via the given {@link TypeInformation}. * * @param source never {@literal null}. - * @param typeInformation information abount the desired target type. + * @param typeInformation information about the desired target type. * @return never {@literal null}. */ Object read(Object source, TypeInformation typeInformation); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index 84e7e2c2d8..d5285e7d2e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -70,16 +70,16 @@ * @author Mark Paluch */ @ExtendWith(MockitoExtension.class) -public class DbRefMappingMongoConverterUnitTests { +class DbRefMappingMongoConverterUnitTests { - MappingMongoConverter converter; - MongoMappingContext mappingContext; + private MappingMongoConverter converter; + private MongoMappingContext mappingContext; @Mock MongoDatabaseFactory dbFactory; - DefaultDbRefResolver dbRefResolver; + private DefaultDbRefResolver dbRefResolver; @BeforeEach - public void setUp() { + void setUp() { when(dbFactory.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); @@ -89,7 +89,7 @@ public void setUp() { } @Test // 
DATAMONGO-347 - public void createsSimpleDBRefCorrectly() { + void createsSimpleDBRefCorrectly() { Person person = new Person(); person.id = "foo"; @@ -100,7 +100,7 @@ public void createsSimpleDBRefCorrectly() { } @Test // DATAMONGO-657 - public void convertDocumentWithMapDBRef() { + void convertDocumentWithMapDBRef() { Document mapValDocument = new Document(); mapValDocument.put("_id", BigInteger.ONE); @@ -145,7 +145,7 @@ public void convertDocumentWithMapDBRef() { } @Test // DATAMONGO-347 - public void createsDBRefWithClientSpecCorrectly() { + void createsDBRefWithClientSpecCorrectly() { PropertyPath path = PropertyPath.from("person", PersonClient.class); MongoPersistentProperty property = mappingContext.getPersistentPropertyPath(path).getLeafProperty(); @@ -159,7 +159,7 @@ public void createsDBRefWithClientSpecCorrectly() { } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnInterface() { + void lazyLoadingProxyForLazyDbRefOnInterface() { String id = "42"; String value = "bubu"; @@ -180,7 +180,7 @@ public void lazyLoadingProxyForLazyDbRefOnInterface() { } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { + void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { String id = "42"; String value = "bubu"; @@ -201,7 +201,7 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnConcreteType() { + void lazyLoadingProxyForLazyDbRefOnConcreteType() { String id = "42"; String value = "bubu"; @@ -222,7 +222,7 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteType() { } @Test // DATAMONGO-348 - public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor() { + void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor() { String id = "42"; String value = "bubu"; @@ -243,7 +243,7 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor } @Test // DATAMONGO-348 - public 
void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructorButWithoutDefaultConstructor() { + void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructorButWithoutDefaultConstructor() { String id = "42"; String value = "bubu"; @@ -266,7 +266,7 @@ public void lazyLoadingProxyForLazyDbRefOnConcreteTypeWithPersistenceConstructor } @Test // DATAMONGO-348 - public void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { + void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { String id = "42"; String value = "bubu"; @@ -288,7 +288,7 @@ public void lazyLoadingProxyForSerializableLazyDbRefOnConcreteType() { } @Test // DATAMONGO-884 - public void lazyLoadingProxyForToStringObjectMethodOverridingDbref() { + void lazyLoadingProxyForToStringObjectMethodOverridingDbref() { String id = "42"; String value = "bubu"; @@ -309,7 +309,7 @@ public void lazyLoadingProxyForToStringObjectMethodOverridingDbref() { } @Test // DATAMONGO-884 - public void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; @@ -337,7 +337,7 @@ public void callingToStringObjectMethodOnLazyLoadingDbrefShouldNotInitializeProx } @Test // DATAMONGO-884 - public void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; @@ -362,7 +362,7 @@ public void equalsObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { } @Test // DATAMONGO-884 - public void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { + void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { String id = "42"; String value = "bubu"; @@ -385,7 +385,7 @@ public void hashcodeObjectMethodOnLazyLoadingDbrefShouldNotInitializeProxy() { } @Test // DATAMONGO-884 - public void 
lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { + void lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { String id = "42"; String value = "bubu"; @@ -414,7 +414,7 @@ public void lazyLoadingProxyForEqualsAndHashcodeObjectMethodOverridingDbref() { } @Test // DATAMONGO-987 - public void shouldNotGenerateLazyLoadingProxyForNullValues() { + void shouldNotGenerateLazyLoadingProxyForNullValues() { Document document = new Document(); ClassWithLazyDbRefs lazyDbRefs = new ClassWithLazyDbRefs(); @@ -432,7 +432,7 @@ public void shouldNotGenerateLazyLoadingProxyForNullValues() { } @Test // DATAMONGO-1005 - public void shouldBeAbleToStoreDirectReferencesToSelf() { + void shouldBeAbleToStoreDirectReferencesToSelf() { Document document = new Document(); @@ -448,7 +448,7 @@ public void shouldBeAbleToStoreDirectReferencesToSelf() { } @Test // DATAMONGO-1005 - public void shouldBeAbleToStoreNestedReferencesToSelf() { + void shouldBeAbleToStoreNestedReferencesToSelf() { Document document = new Document(); @@ -467,7 +467,7 @@ public void shouldBeAbleToStoreNestedReferencesToSelf() { } @Test // DATAMONGO-1012 - public void shouldEagerlyResolveIdPropertyWithFieldAccess() { + void shouldEagerlyResolveIdPropertyWithFieldAccess() { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(ClassWithLazyDbRefs.class); MongoPersistentProperty property = entity.getRequiredPersistentProperty("dbRefToConcreteType"); @@ -489,7 +489,7 @@ public void shouldEagerlyResolveIdPropertyWithFieldAccess() { } @Test // DATAMONGO-1012 - public void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { + void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(ClassWithLazyDbRefs.class); MongoPersistentProperty property = entity.getRequiredPersistentProperty("dbRefToConcreteTypeWithPropertyAccess"); @@ -507,7 +507,7 @@ public void 
shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { } @Test // DATAMONGO-1076 - public void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { + void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { MongoPersistentEntity entity = mappingContext .getRequiredPersistentEntity(WithObjectMethodOverrideLazyDbRefs.class); @@ -525,7 +525,7 @@ public void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoke } @Test // DATAMONGO-1194 - public void shouldBulkFetchListOfReferences() { + void shouldBulkFetchListOfReferences() { String id1 = "1"; String id2 = "2"; @@ -553,7 +553,7 @@ public void shouldBulkFetchListOfReferences() { } @Test // DATAMONGO-1666 - public void shouldBulkFetchSetOfReferencesForConstructorCreation() { + void shouldBulkFetchSetOfReferencesForConstructorCreation() { String id1 = "1"; String id2 = "2"; @@ -575,7 +575,7 @@ public void shouldBulkFetchSetOfReferencesForConstructorCreation() { } @Test // DATAMONGO-1194 - public void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointToDifferentCollections() { + void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointToDifferentCollections() { String id1 = "1"; String id2 = "2"; @@ -603,7 +603,7 @@ public void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointT } @Test // DATAMONGO-1194 - public void shouldBulkFetchMapOfReferences() { + void shouldBulkFetchMapOfReferences() { MapDBRefVal val1 = new MapDBRefVal(); val1.id = BigInteger.ONE; @@ -635,7 +635,7 @@ public void shouldBulkFetchMapOfReferences() { } @Test // DATAMONGO-1194 - public void shouldBulkFetchLazyMapOfReferences() { + void shouldBulkFetchLazyMapOfReferences() { MapDBRefVal val1 = new MapDBRefVal(); val1.id = BigInteger.ONE; @@ -722,15 +722,15 @@ static class LazyDbRefTarget implements Serializable { @Id String id; String value; - public LazyDbRefTarget() { + LazyDbRefTarget() { this(null); } - 
public LazyDbRefTarget(String id) { + LazyDbRefTarget(String id) { this(id, null); } - public LazyDbRefTarget(String id, String value) { + LazyDbRefTarget(String id, String value) { this.id = id; this.value = value; } @@ -750,7 +750,7 @@ static class LazyDbRefTargetPropertyAccess implements Serializable { @Id @AccessType(Type.PROPERTY) String id; - public LazyDbRefTargetPropertyAccess(String id) { + LazyDbRefTargetPropertyAccess(String id) { this.id = id; } @@ -767,7 +767,7 @@ static class LazyDbRefTargetWithPeristenceConstructor extends LazyDbRefTarget { public LazyDbRefTargetWithPeristenceConstructor() {} @PersistenceConstructor - public LazyDbRefTargetWithPeristenceConstructor(String id, String value) { + LazyDbRefTargetWithPeristenceConstructor(String id, String value) { super(id, value); this.persistenceConstructorCalled = true; } @@ -783,7 +783,7 @@ static class LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor e boolean persistenceConstructorCalled; @PersistenceConstructor - public LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor(String id, String value) { + LazyDbRefTargetWithPeristenceConstructorWithoutDefaultConstructor(String id, String value) { super(id, value); this.persistenceConstructorCalled = true; } @@ -797,7 +797,7 @@ static class SerializableLazyDbRefTarget extends LazyDbRefTarget implements Seri public SerializableLazyDbRefTarget() {} - public SerializableLazyDbRefTarget(String id, String value) { + SerializableLazyDbRefTarget(String id, String value) { super(id, value); } @@ -810,7 +810,7 @@ static class ToStringObjectMethodOverrideLazyDbRefTarget extends LazyDbRefTarget public ToStringObjectMethodOverrideLazyDbRefTarget() {} - public ToStringObjectMethodOverrideLazyDbRefTarget(String id, String value) { + ToStringObjectMethodOverrideLazyDbRefTarget(String id, String value) { super(id, value); } @@ -830,7 +830,7 @@ static class EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget extends LazyDb public 
EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget() {} - public EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget(String id, String value) { + EqualsAndHashCodeObjectMethodOverrideLazyDbRefTarget(String id, String value) { super(id, value); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java index d7a2870477..a65214610e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DefaultDbRefResolverUnitTests.java @@ -37,6 +37,7 @@ import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.DocumentTestUtils; +import org.springframework.data.mongodb.core.MongoExceptionTranslator; import com.mongodb.DBRef; import com.mongodb.client.FindIterable; @@ -63,6 +64,7 @@ class DefaultDbRefResolverUnitTests { void setUp() { when(factoryMock.getMongoDatabase()).thenReturn(dbMock); + when(factoryMock.getExceptionTranslator()).thenReturn(new MongoExceptionTranslator()); when(dbMock.getCollection(anyString(), any(Class.class))).thenReturn(collectionMock); when(collectionMock.find(any(Document.class))).thenReturn(cursorMock); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java index 5b758136e4..d357ca0f85 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingInterceptorUnitTests.java @@ -26,7 +26,7 @@ import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.LazyLoadingException; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import com.mongodb.DBRef; @@ -37,20 +37,20 @@ * @author Christoph Strobl */ @ExtendWith(MockitoExtension.class) -public class LazyLoadingInterceptorUnitTests { +class LazyLoadingInterceptorUnitTests { @Mock MongoPersistentProperty propertyMock; @Mock DBRef dbrefMock; @Mock DbRefResolverCallback callbackMock; @Test // DATAMONGO-1437 - public void shouldPreserveCauseForNonTranslatableExceptions() throws Throwable { + void shouldPreserveCauseForNonTranslatableExceptions() throws Throwable { NullPointerException npe = new NullPointerException("Some Exception we did not think about."); when(callbackMock.resolve(propertyMock)).thenThrow(npe); assertThatExceptionOfType(LazyLoadingException.class).isThrownBy(() -> { - new LazyLoadingInterceptor(propertyMock, dbrefMock, new NullExceptionTranslator(), callbackMock).intercept(null, + new LazyLoadingInterceptor(propertyMock, callbackMock, dbrefMock, new NullExceptionTranslator()).intercept(null, LazyLoadingProxy.class.getMethod("getTarget"), null, null); }).withCause(npe); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java index 91afb8c6ec..15e953930c 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/LazyLoadingTestUtils.java @@ -16,12 +16,12 @@ package org.springframework.data.mongodb.core.convert; import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.*; import java.util.function.Consumer; import org.springframework.aop.framework.Advised; import org.springframework.cglib.proxy.Factory; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver.LazyLoadingInterceptor; import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.test.util.ReflectionTestUtils; @@ -54,7 +54,7 @@ public static void assertProxyIsResolved(Object target, boolean expected) { public static void assertProxy(Object proxy, Consumer verification) { - LazyLoadingProxyFactory.LazyLoadingInterceptor interceptor = (LazyLoadingProxyFactory.LazyLoadingInterceptor) (proxy instanceof Advised + LazyLoadingInterceptor interceptor = (LazyLoadingInterceptor) (proxy instanceof Advised ? ((Advised) proxy).getAdvisors()[0].getAdvice() : ((Factory) proxy).getCallback(0)); @@ -68,9 +68,9 @@ private static LazyLoadingInterceptor extractInterceptor(Object proxy) { public static class LazyLoadingProxyValueRetriever { - LazyLoadingProxyFactory.LazyLoadingInterceptor interceptor; + LazyLoadingInterceptor interceptor; - public LazyLoadingProxyValueRetriever(LazyLoadingProxyFactory.LazyLoadingInterceptor interceptor) { + public LazyLoadingProxyValueRetriever(LazyLoadingInterceptor interceptor) { this.interceptor = interceptor; } From ece261aadb6e6ec5d94f15df63ca7fac1fce2cee Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 31 May 2021 09:58:30 +0200 Subject: [PATCH 022/983] Fix conversion for types having a converter registered. Fixes: #3660 Original pull request: #3662. 
--- .../core/convert/MappingMongoConverter.java | 14 +- .../MappingMongoConverterUnitTests.java | 196 ++++++++++++++++++ 2 files changed, 206 insertions(+), 4 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 413ce2ce44..d41decf3ed 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -171,7 +171,7 @@ protected ConversionContext getConversionContext(ObjectPath path) { Assert.notNull(path, "ObjectPath must not be null"); - return new ConversionContext(path, this::readDocument, this::readCollectionOrArray, this::readMap, this::readDBRef, + return new ConversionContext(conversions, path, this::readDocument, this::readCollectionOrArray, this::readMap, this::readDBRef, this::getPotentiallyConvertedSimpleRead); } @@ -1323,7 +1323,7 @@ protected Map readMap(ConversionContext context, Bson bson, Type } Object value = entry.getValue(); - map.put(key, context.convert(value, valueType)); + map.put(key, value == null ? 
value : context.convert(value, valueType)); } return map; @@ -1970,6 +1970,7 @@ public org.springframework.data.util.TypeInformation specialize(Cla */ protected static class ConversionContext { + private final org.springframework.data.convert.CustomConversions conversions; private final ObjectPath path; private final ContainerValueConverter documentConverter; private final ContainerValueConverter> collectionConverter; @@ -1977,10 +1978,11 @@ protected static class ConversionContext { private final ContainerValueConverter dbRefConverter; private final ValueConverter elementConverter; - ConversionContext(ObjectPath path, ContainerValueConverter documentConverter, + ConversionContext(org.springframework.data.convert.CustomConversions customConversions, ObjectPath path, ContainerValueConverter documentConverter, ContainerValueConverter> collectionConverter, ContainerValueConverter mapConverter, ContainerValueConverter dbRefConverter, ValueConverter elementConverter) { + this.conversions = customConversions; this.path = path; this.documentConverter = documentConverter; this.collectionConverter = collectionConverter; @@ -2001,6 +2003,10 @@ public S convert(Object source, TypeInformation Assert.notNull(typeHint, "TypeInformation must not be null"); + if (conversions.hasCustomReadTarget(source.getClass(), typeHint.getType())) { + return (S) elementConverter.convert(source, typeHint); + } + if (source instanceof Collection) { Class rawType = typeHint.getType(); @@ -2046,7 +2052,7 @@ public ConversionContext withPath(ObjectPath currentPath) { Assert.notNull(currentPath, "ObjectPath must not be null"); - return new ConversionContext(currentPath, documentConverter, collectionConverter, mapConverter, dbRefConverter, + return new ConversionContext(conversions, currentPath, documentConverter, collectionConverter, mapConverter, dbRefConverter, elementConverter); } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index bd3e98788f..441b8e3347 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -81,6 +81,8 @@ import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; import org.springframework.data.util.ClassTypeInformation; +import org.springframework.lang.NonNull; +import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import com.mongodb.BasicDBList; @@ -2428,6 +2430,98 @@ void shouldUseMostConcreteCustomConversionTargetOnRead() { verify(subTypeOfGenericTypeConverter).convert(eq(source)); } + + @Test // GH-3660 + void usesCustomConverterForMapTypesOnWrite() { + + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeImplementingMap source = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("1st", "one").containsEntry("2nd", 2); + } + + @Test // GH-3660 + void usesCustomConverterForTypesImplementingMapOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeImplementingMap source = new 
TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("1st", "one").containsEntry("2nd", 2); + } + + @Test // GH-3660 + void usesCustomConverterForTypesImplementingMapOnRead() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new DocumentToTypeImplementingMapConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("1st", "one") + .append("2nd", 2) + .append("_class", TypeImplementingMap.class.getName()); + + TypeImplementingMap target = converter.read(TypeImplementingMap.class, source); + + assertThat(target).isEqualTo(new TypeImplementingMap("one", 2)); + } + + @Test // GH-3660 + void usesCustomConverterForPropertiesUsingTypesThatImplementMapOnWrite() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new TypeImplementingMapToDocumentConverter()); + })); + converter.afterPropertiesSet(); + + TypeWrappingTypeImplementingMap source = new TypeWrappingTypeImplementingMap(); + source.typeImplementingMap = new TypeImplementingMap("one", 2); + org.bson.Document target = new org.bson.Document(); + + converter.write(source, target); + + assertThat(target).containsEntry("typeImplementingMap", new org.bson.Document("1st", "one").append("2nd", 2)); + } + + @Test // GH-3660 + void usesCustomConverterForPropertiesUsingTypesImplementingMapOnRead() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new DocumentToTypeImplementingMapConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("typeImplementingMap", + new org.bson.Document("1st", 
"one") + .append("2nd", 2)) + .append("_class", TypeWrappingTypeImplementingMap.class.getName()); + + TypeWrappingTypeImplementingMap target = converter.read(TypeWrappingTypeImplementingMap.class, source); + + assertThat(target.typeImplementingMap).isEqualTo(new TypeImplementingMap("one", 2)); + } + + static class GenericType { T content; } @@ -2971,4 +3065,106 @@ public SubTypeOfGenericType convert(org.bson.Document source) { return target; } } + + @WritingConverter + static class TypeImplementingMapToDocumentConverter implements Converter { + + @Nullable + @Override + public org.bson.Document convert(TypeImplementingMap source) { + return new org.bson.Document("1st", source.val1).append("2nd", source.val2); + } + } + + @ReadingConverter + static class DocumentToTypeImplementingMapConverter implements Converter { + + @Nullable + @Override + public TypeImplementingMap convert(org.bson.Document source) { + return new TypeImplementingMap(source.getString("1st"), source.getInteger("2nd")); + } + } + + static class TypeWrappingTypeImplementingMap { + + String id; + TypeImplementingMap typeImplementingMap; + } + + @EqualsAndHashCode + static class TypeImplementingMap implements Map { + + String val1; + int val2; + + public TypeImplementingMap(String val1, int val2) { + this.val1 = val1; + this.val2 = val2; + } + + @Override + public int size() { + return 0; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public boolean containsKey(Object key) { + return false; + } + + @Override + public boolean containsValue(Object value) { + return false; + } + + @Override + public String get(Object key) { + return null; + } + + @Nullable + @Override + public String put(String key, String value) { + return null; + } + + @Override + public String remove(Object key) { + return null; + } + + @Override + public void putAll(@NonNull Map m) { + + } + + @Override + public void clear() { + + } + + @NonNull + @Override + public Set keySet() { + return null; + 
} + + @NonNull + @Override + public Collection values() { + return null; + } + + @NonNull + @Override + public Set> entrySet() { + return null; + } + } } From 2b715c54d3749adf4df0d9020343393eb31afbc0 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 9 Jun 2021 11:32:15 +0200 Subject: [PATCH 023/983] Polishing. Reformat code. See #3660. Original pull request: #3662. --- .../core/convert/MappingMongoConverter.java | 35 ++++++++++--------- .../MappingMongoConverterUnitTests.java | 28 +++++++-------- 2 files changed, 32 insertions(+), 31 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index d41decf3ed..bcb9575b92 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -171,8 +171,8 @@ protected ConversionContext getConversionContext(ObjectPath path) { Assert.notNull(path, "ObjectPath must not be null"); - return new ConversionContext(conversions, path, this::readDocument, this::readCollectionOrArray, this::readMap, this::readDBRef, - this::getPotentiallyConvertedSimpleRead); + return new ConversionContext(conversions, path, this::readDocument, this::readCollectionOrArray, this::readMap, + this::readDBRef, this::getPotentiallyConvertedSimpleRead); } /** @@ -527,7 +527,8 @@ private void readAssociation(Association association, P return; } - if (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) { + if (property.isDocumentReference() + || (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) { // quite unusual but sounds like worth having? 
@@ -595,13 +596,13 @@ public DocumentPointer toDocumentPointer(Object source, @Nullable MongoPersisten Assert.notNull(referringProperty, "Cannot create DocumentReference. The referringProperty must not be null!"); - if (referringProperty.isDbReference()) { - return () -> toDBRef(source, referringProperty); - } + if (referringProperty.isDbReference()) { + return () -> toDBRef(source, referringProperty); + } - if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) { - return createDocumentPointer(source, referringProperty); - } + if (referringProperty.isDocumentReference() || referringProperty.findAnnotation(Reference.class) != null) { + return createDocumentPointer(source, referringProperty); + } throw new IllegalArgumentException("The referringProperty is neither a DBRef nor a document reference"); } @@ -612,7 +613,7 @@ DocumentPointer createDocumentPointer(Object source, @Nullable MongoPersisten return () -> source; } - if(source instanceof DocumentPointer) { + if (source instanceof DocumentPointer) { return (DocumentPointer) source; } @@ -622,7 +623,8 @@ DocumentPointer createDocumentPointer(Object source, @Nullable MongoPersisten } if (ClassUtils.isAssignableValue(referringProperty.getAssociationTargetType(), source)) { - return documentPointerFactory.computePointer(mappingContext, referringProperty, source, referringProperty.getActualType()); + return documentPointerFactory.computePointer(mappingContext, referringProperty, source, + referringProperty.getActualType()); } return () -> source; @@ -1978,9 +1980,10 @@ protected static class ConversionContext { private final ContainerValueConverter dbRefConverter; private final ValueConverter elementConverter; - ConversionContext(org.springframework.data.convert.CustomConversions customConversions, ObjectPath path, ContainerValueConverter documentConverter, - ContainerValueConverter> collectionConverter, ContainerValueConverter mapConverter, - ContainerValueConverter 
dbRefConverter, ValueConverter elementConverter) { + ConversionContext(org.springframework.data.convert.CustomConversions customConversions, ObjectPath path, + ContainerValueConverter documentConverter, ContainerValueConverter> collectionConverter, + ContainerValueConverter mapConverter, ContainerValueConverter dbRefConverter, + ValueConverter elementConverter) { this.conversions = customConversions; this.path = path; @@ -2052,8 +2055,8 @@ public ConversionContext withPath(ObjectPath currentPath) { Assert.notNull(currentPath, "ObjectPath must not be null"); - return new ConversionContext(conversions, currentPath, documentConverter, collectionConverter, mapConverter, dbRefConverter, - elementConverter); + return new ConversionContext(conversions, currentPath, documentConverter, collectionConverter, mapConverter, + dbRefConverter, elementConverter); } public ObjectPath getPath() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 441b8e3347..2009b5314b 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -30,7 +30,6 @@ import java.time.temporal.ChronoUnit; import java.util.*; -import org.assertj.core.api.Assertions; import org.bson.types.Code; import org.bson.types.Decimal128; import org.bson.types.ObjectId; @@ -42,6 +41,7 @@ import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.aop.framework.ProxyFactory; import org.springframework.beans.ConversionNotSupportedException; import org.springframework.beans.factory.annotation.Value; @@ -529,7 +529,7 @@ void writesBigIntegerIdCorrectly() { } @Test 
- public void convertsObjectsIfNecessary() { + void convertsObjectsIfNecessary() { ObjectId id = new ObjectId(); assertThat(converter.convertToMongoType(id)).isEqualTo(id); @@ -2113,21 +2113,21 @@ void shouldAllowReadingBackDbObject() { } @Test // DATAMONGO-2479 - public void entityCallbacksAreNotSetByDefault() { - Assertions.assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNull(); + void entityCallbacksAreNotSetByDefault() { + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNull(); } @Test // DATAMONGO-2479 - public void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { + void entityCallbacksShouldBeInitiatedOnSettingApplicationContext() { ApplicationContext ctx = new StaticApplicationContext(); converter.setApplicationContext(ctx); - Assertions.assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNotNull(); + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isNotNull(); } @Test // DATAMONGO-2479 - public void setterForEntityCallbackOverridesContextInitializedOnes() { + void setterForEntityCallbackOverridesContextInitializedOnes() { ApplicationContext ctx = new StaticApplicationContext(); converter.setApplicationContext(ctx); @@ -2135,11 +2135,11 @@ public void setterForEntityCallbackOverridesContextInitializedOnes() { EntityCallbacks callbacks = EntityCallbacks.create(); converter.setEntityCallbacks(callbacks); - Assertions.assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); } @Test // DATAMONGO-2479 - public void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { + void setterForApplicationContextShouldNotOverrideAlreadySetEntityCallbacks() { EntityCallbacks callbacks = EntityCallbacks.create(); ApplicationContext ctx = new StaticApplicationContext(); @@ -2147,11 +2147,11 @@ public void 
setterForApplicationContextShouldNotOverrideAlreadySetEntityCallback converter.setEntityCallbacks(callbacks); converter.setApplicationContext(ctx); - Assertions.assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); + assertThat(ReflectionTestUtils.getField(converter, "entityCallbacks")).isSameAs(callbacks); } @Test // DATAMONGO-2479 - public void resolveDBRefMapValueShouldInvokeCallbacks() { + void resolveDBRefMapValueShouldInvokeCallbacks() { AfterConvertCallback afterConvertCallback = spy(new ReturningAfterConvertCallback()); converter.setEntityCallbacks(EntityCallbacks.create(afterConvertCallback)); @@ -2168,7 +2168,7 @@ public void resolveDBRefMapValueShouldInvokeCallbacks() { } @Test // DATAMONGO-2300 - public void readAndConvertDBRefNestedByMapCorrectly() { + void readAndConvertDBRefNestedByMapCorrectly() { org.bson.Document cluster = new org.bson.Document("_id", 100L); DBRef dbRef = new DBRef("clusters", 100L); @@ -2434,7 +2434,6 @@ void shouldUseMostConcreteCustomConversionTargetOnRead() { @Test // GH-3660 void usesCustomConverterForMapTypesOnWrite() { - converter = new MappingMongoConverter(resolver, mappingContext); converter.setCustomConversions(MongoCustomConversions.create(it -> { it.registerConverter(new TypeImplementingMapToDocumentConverter()); @@ -2521,7 +2520,6 @@ void usesCustomConverterForPropertiesUsingTypesImplementingMapOnRead() { assertThat(target.typeImplementingMap).isEqualTo(new TypeImplementingMap("one", 2)); } - static class GenericType { T content; } @@ -3098,7 +3096,7 @@ static class TypeImplementingMap implements Map { String val1; int val2; - public TypeImplementingMap(String val1, int val2) { + TypeImplementingMap(String val1, int val2) { this.val1 = val1; this.val2 = val2; } From 149a703ecce3d0cb4ee8dcb8169184a2f1e801f0 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 4 May 2021 13:35:45 +0200 Subject: [PATCH 024/983] Fix NPE in QueryMapper when trying to apply target type on null 
value. Closes #3633 Original pull request: #3643. --- .../data/mongodb/core/convert/QueryMapper.java | 6 +++--- .../mongodb/core/convert/QueryMapperUnitTests.java | 11 +++++++++++ .../AbstractPersonRepositoryIntegrationTests.java | 11 +++++++++++ .../data/mongodb/repository/PersonRepository.java | 3 +++ .../json/ParameterBindingJsonReaderUnitTests.java | 7 +++++++ 5 files changed, 35 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 81c1c96ddf..65f66ec4cb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -756,12 +756,12 @@ protected boolean isKeyword(String candidate) { * converted one by one. * * @param documentField the field and its meta data - * @param value the actual value + * @param value the actual value. Can be {@literal null}. * @return the potentially converted target value. 
*/ - private Object applyFieldTargetTypeHintToValue(Field documentField, Object value) { + private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) { - if (documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) { + if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) { return value; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index f7b5ec76d7..8c23b4a222 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1255,6 +1255,17 @@ void resolvesFieldNameWithUnderscoreOnNestedMappedFieldnameWithUnderscoresCorrec assertThat(document).isEqualTo(new org.bson.Document("double_underscore.renamed", new org.bson.Document("$exists", true))); } + @Test // GH-3633 + void mapsNullValueForFieldWithCustomTargetType() { + + Query query = query(where("stringAsOid").is(null)); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", null)); + } + class WithDeepArrayNesting { List level0; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java index 61caa30560..d576913850 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java @@ -61,7 +61,9 @@ import org.springframework.data.mongodb.core.aggregation.AggregationResults; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.repository.Person.Sex; import org.springframework.data.mongodb.repository.SampleEvaluationContextExtension.SampleSecurityContextHolder; import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; @@ -1435,6 +1437,15 @@ void annotatedQueryShouldAllowAggregationInProjection() { assertThat(target.getFirstname()).isEqualTo(alicia.getFirstname().toUpperCase()); } + @Test // GH-3633 + void annotatedQueryWithNullEqualityCheckShouldWork() { + + operations.updateFirst(Query.query(Criteria.where("id").is(dave.getId())), Update.update("age", null), Person.class); + + Person byQueryWithNullEqualityCheck = repository.findByQueryWithNullEqualityCheck(); + assertThat(byQueryWithNullEqualityCheck.getId()).isEqualTo(dave.getId()); + } + @Test // GH-3602 void executesQueryWithDocumentReferenceCorrectly() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java index ca382fa2ca..155cf7a7b9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java @@ -417,5 +417,8 @@ Person findPersonByManyArguments(String firstname, String lastname, String email List findByUnwrappedUser(User user); + @Query("{ 
'age' : null }") + Person findByQueryWithNullEqualityCheck(); + List findBySpiritAnimal(User user); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java index 72ab2b454b..1a684af164 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/ParameterBindingJsonReaderUnitTests.java @@ -383,6 +383,13 @@ void shouldParseNestedArrays() { .parse("{ 'stores.location' : { $geoWithin: { $centerSphere: [ [ 1.948516, 48.799029 ] , 0.004 ] } } }")); } + @Test // GH-3633 + void parsesNullValue() { + + Document target = parse("{ 'parent' : null }"); + assertThat(target).isEqualTo(new Document("parent", null)); + } + private static Document parse(String json, Object... args) { ParameterBindingJsonReader reader = new ParameterBindingJsonReader(json, args); From efa9a2d40877f78a95e4b9314ed7ae5ff2ed23c1 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 4 May 2021 14:29:52 +0200 Subject: [PATCH 025/983] Add Criteria.isNullValue() as alternative to Criteria.is(null). See #3633 Original pull request: #3643. 
--- .../data/mongodb/core/query/Criteria.java | 37 +++++++++++++++++++ .../core/convert/QueryMapperUnitTests.java | 13 ++++++- 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index b0a1b49893..9b1e8df940 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -29,6 +29,7 @@ import java.util.stream.Collectors; import org.bson.BsonRegularExpression; +import org.bson.BsonType; import org.bson.Document; import org.bson.types.Binary; import org.springframework.data.domain.Example; @@ -188,6 +189,42 @@ public Criteria is(@Nullable Object value) { return this; } + /** + * Creates a criterion using {@literal null} equality comparison which matches documents that either contain the item + * field whose value is {@literal null} or that do not contain the item field. + *

        + * Use {@link #isNullValue()} to only query for documents that contain the field whose value is equal to + * {@link org.bson.BsonType#NULL}.
        + * Use {@link #exists(boolean)} to query for documents that do (not) contain the field. + * + * @return this. + * @see Query for Null or + * Missing Fields: Equality Filter + * @since 3.3 + */ + public Criteria isNull() { + return is(null); + } + + /** + * Creates a criterion using a {@link org.bson.BsonType} comparison which matches only documents that contain the item + * field whose value is equal to {@link org.bson.BsonType#NULL}. + *

        + * Use {@link #isNull()} to query for documents that contain the field with a {@literal null} value or do not contain the + * field at all.
        + * Use {@link #exists(boolean)} to query for documents that do (not) contain the field. + * + * @return this. + * @see Query for Null or Missing + * Fields: Type Check + * @since 3.3 + */ + public Criteria isNullValue() { + + criteria.put("$type", BsonType.NULL.getValue()); + return this; + } + private boolean lastOperatorWasNot() { return !this.criteria.isEmpty() && "$not".equals(this.criteria.keySet().toArray()[this.criteria.size() - 1]); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 8c23b4a222..dd8dd25e8f 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1258,7 +1258,7 @@ void resolvesFieldNameWithUnderscoreOnNestedMappedFieldnameWithUnderscoresCorrec @Test // GH-3633 void mapsNullValueForFieldWithCustomTargetType() { - Query query = query(where("stringAsOid").is(null)); + Query query = query(where("stringAsOid").isNull()); org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); @@ -1266,6 +1266,17 @@ void mapsNullValueForFieldWithCustomTargetType() { assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", null)); } + @Test // GH-3633 + void mapsNullBsonTypeForFieldWithCustomTargetType() { + + Query query = query(where("stringAsOid").isNullValue()); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(NonIdFieldWithObjectIdTargetType.class)); + + assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", new org.bson.Document("$type", 10))); + } + class WithDeepArrayNesting { List level0; From 
a481636429fd92af83a8226c0c51ae1cbf13afe3 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 9 Jun 2021 12:06:43 +0200 Subject: [PATCH 026/983] Polishing. Add nullability annotation. Return early on null value conversion. See #3633 Original pull request: #3643. --- .../data/mongodb/core/convert/QueryMapper.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 65f66ec4cb..5840815562 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -445,6 +445,10 @@ else if (isDocument(value)) { } } + if (value == null) { + return null; + } + if (isNestedKeyword(value)) { return getMappedKeyword(new Keyword((Bson) value), documentField.getPropertyEntity()); } @@ -711,7 +715,7 @@ public Object convertId(@Nullable Object id, Class targetType) { * @param candidate * @return */ - protected boolean isNestedKeyword(Object candidate) { + protected boolean isNestedKeyword(@Nullable Object candidate) { if (!(candidate instanceof Document)) { return false; @@ -759,6 +763,7 @@ protected boolean isKeyword(String candidate) { * @param value the actual value. Can be {@literal null}. * @return the potentially converted target value. */ + @Nullable private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) { if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) { From b1020d19baf0d61a4be8fc98aa0f09e514af0ea2 Mon Sep 17 00:00:00 2001 From: Divya Srivastava Date: Sun, 9 May 2021 16:16:43 +0530 Subject: [PATCH 027/983] Add an option to `@Field` annotation to include/exclude null values on write. 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Properties can be annotated with `@Field(write=…)` to control whether a property with a null value should be included or omitted (default) during conversion in the target Document. Closes #3407 Original pull request: #3646. --- .../core/convert/MappingMongoConverter.java | 3 +++ .../mapping/BasicMongoPersistentProperty.java | 14 +++++++++++ .../data/mongodb/core/mapping/Field.java | 25 +++++++++++++++++++ .../core/mapping/MongoPersistentProperty.java | 18 +++++++++++++ .../UnwrappedMongoPersistentProperty.java | 10 ++++++++ ...BasicMongoPersistentPropertyUnitTests.java | 13 ++++++++++ 6 files changed, 83 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index bcb9575b92..86be47c558 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -744,6 +744,9 @@ private void writeProperties(Bson bson, MongoPersistentEntity entity, Persist Object value = accessor.getProperty(prop); if (value == null) { + if(!prop.isPropertyOmittableOnNull()) { + writeSimpleInternal(value, bson , prop); + } continue; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 53af00fc54..3973f802a2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -285,4 
+285,18 @@ public boolean isExplicitLanguageProperty() { public boolean isTextScoreProperty() { return isAnnotationPresent(TextScore.class); } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isPropertyOmittableOnNull() + */ + public boolean isPropertyOmittableOnNull() { + org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation( + org.springframework.data.mongodb.core.mapping.Field.class); + + if ( annotation != null && annotation.write().equals(Field.Write.ALWAYS) ) { + return false; + } + return true; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java index 45e8b815fb..b2365f8d6c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java @@ -34,6 +34,21 @@ @Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE }) public @interface Field { + /** + * Enumeration of write strategies for a field with null value.It decides whether a field with null value has to be + * written to the resulting document to be saved to the database. + */ + enum Write{ + /* + * The field will always be written to the database irrespective of null value. + */ + ALWAYS, + /* + * The field will only be written to the database if it has a non null value. + */ + NON_NULL + } + /** * The key to be used to store the field inside the document. Alias for {@link #name()}. * @@ -65,4 +80,14 @@ * @since 2.2 */ FieldType targetType() default FieldType.IMPLICIT; + + /** + * If set to {@link Write#NON_NULL} {@literal null} values will be omitted. + * Setting the value to {@link Write#ALWAYS} explicitly adds an entry for the given field + * holding {@literal null} as a value {@code 'fieldName' : null }. + *

        + * NOTE Setting the value to {@link Write#ALWAYS} may lead to increased document size. + * @return {@link Write#NON_NULL} by default. + */ + Write write() default Write.NON_NULL; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java index c753f3856d..10f35435d2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java @@ -104,6 +104,24 @@ public interface MongoPersistentProperty extends PersistentProperty + * It's annotated with {@link Field.Write}. + * + * @return + * @since 1.6 + */ + boolean isPropertyOmittableOnNull(); + + /** + * Returns whether the property is to be written to the document if the value is null
        + * It's annotated with {@link omitNull}. + * + * @return + * @since 1.6 + */ + boolean isOmitNullProperty(); /** * Returns the {@link DBRef} if the property is a reference. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index f8218171c5..3d7de5d094 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -92,6 +92,11 @@ public boolean isExplicitLanguageProperty() { public boolean isTextScoreProperty() { return delegate.isTextScoreProperty(); } + + @Override + public boolean isOmitNullProperty() { + return delegate.isOmitNullProperty(); + } @Override @Nullable @@ -315,4 +320,9 @@ public Class getAssociationTargetType() { public PersistentPropertyAccessor getAccessorForOwner(T owner) { return delegate.getAccessorForOwner(owner); } + + @Override + public boolean isPropertyOmittableOnNull() { + return delegate.isPropertyOmittableOnNull(); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index 3fb4f59084..69f9cb6613 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -146,6 +146,13 @@ public void shouldDetectTextScorePropertyCorrectly() { assertThat(property.isTextScoreProperty()).isTrue(); } + @Test // DATAMONGO-2551 + public void 
shouldDetectOmittableOnNullPropertyCorrectly() { + + MongoPersistentProperty property = getPropertyFor(DocumentWithOmittableOnNullProperty.class, "write"); + assertThat(property.isPropertyOmittableOnNull()).isTrue(); + } + @Test // DATAMONGO-976 public void shouldDetectTextScoreAsReadOnlyProperty() { @@ -297,6 +304,12 @@ static class DocumentWithTextScoreProperty { @TextScore Float score; } + static class DocumentWithOmittableOnNullProperty { + + @org.springframework.data.mongodb.core.mapping.Field("write") org.springframework.data.mongodb.core.mapping.Field.Write write; + + } + static class DocumentWithExplicitlyRenamedIdProperty { @org.springframework.data.mongodb.core.mapping.Field("id") String id; From c217618d9dfb29d1de45e7767844cff2d668d956 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 14 Jun 2021 09:30:08 +0200 Subject: [PATCH 028/983] Polishing. Reorder methods and types. Rename MongoPersistentProperty.isOmitNullProperty to writeNullValues. Adapt caching MongoPersistentProperty and add tests. Tweak Javadoc wording, add author and since tags. See #3407 Original pull request: #3646. 
--- .../core/convert/MappingMongoConverter.java | 20 +++-- .../mapping/BasicMongoPersistentProperty.java | 27 +++--- .../CachingMongoPersistentProperty.java | 15 ++++ .../data/mongodb/core/mapping/Field.java | 44 ++++++---- .../core/mapping/MongoPersistentProperty.java | 28 +++--- .../UnwrappedMongoPersistentProperty.java | 14 ++- .../data/mongodb/core/MongoTemplateTests.java | 88 ++++++++++++++++++- .../mongodb/core/MongoTemplateUnitTests.java | 6 +- .../MappingMongoConverterUnitTests.java | 28 ++++++ ...BasicMongoPersistentPropertyUnitTests.java | 73 ++++++++------- src/main/asciidoc/new-features.adoc | 1 + 11 files changed, 242 insertions(+), 102 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 86be47c558..732e8c9a51 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -103,6 +103,7 @@ * @author Mark Paluch * @author Roman Puchkovskiy * @author Heesu Jung + * @author Divya Srivastava */ public class MappingMongoConverter extends AbstractMongoConverter implements ApplicationContextAware { @@ -737,6 +738,7 @@ private void writeProperties(Bson bson, MongoPersistentEntity entity, Persist continue; } if (prop.isAssociation()) { + writeAssociation(prop.getRequiredAssociation(), accessor, dbObjectAccessor); continue; } @@ -744,13 +746,10 @@ private void writeProperties(Bson bson, MongoPersistentEntity entity, Persist Object value = accessor.getProperty(prop); if (value == null) { - if(!prop.isPropertyOmittableOnNull()) { - writeSimpleInternal(value, bson , prop); + if (prop.writeNullValues()) { + dbObjectAccessor.put(prop, null); } - continue; - } - - if 
(!conversions.isSimpleType(value.getClass())) { + } else if (!conversions.isSimpleType(value.getClass())) { writePropertyInternal(value, dbObjectAccessor, prop); } else { writeSimpleInternal(value, bson, prop); @@ -763,7 +762,14 @@ private void writeAssociation(Association association, MongoPersistentProperty inverseProp = association.getInverse(); - writePropertyInternal(accessor.getProperty(inverseProp), dbObjectAccessor, inverseProp); + Object value = accessor.getProperty(inverseProp); + + if (value == null && !inverseProp.isUnwrapped() && inverseProp.writeNullValues()) { + dbObjectAccessor.put(inverseProp, null); + return; + } + + writePropertyInternal(value, dbObjectAccessor, inverseProp); } @SuppressWarnings({ "unchecked" }) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 3973f802a2..87eb56b732 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -41,6 +41,7 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava */ public class BasicMongoPersistentProperty extends AnnotationBasedPersistentProperty implements MongoPersistentProperty { @@ -214,6 +215,19 @@ public int getFieldOrder() { return annotation != null ? 
annotation.order() : Integer.MAX_VALUE; } + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#skipNullValues() + */ + @Override + public boolean writeNullValues() { + + org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation( + org.springframework.data.mongodb.core.mapping.Field.class); + + return annotation != null && annotation.write() == Field.Write.ALWAYS; + } + /* * (non-Javadoc) * @see org.springframework.data.mapping.model.AbstractPersistentProperty#createAssociation() @@ -286,17 +300,4 @@ public boolean isTextScoreProperty() { return isAnnotationPresent(TextScore.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.mapping.MongoPersistentProperty#isPropertyOmittableOnNull() - */ - public boolean isPropertyOmittableOnNull() { - org.springframework.data.mongodb.core.mapping.Field annotation = findAnnotation( - org.springframework.data.mongodb.core.mapping.Field.class); - - if ( annotation != null && annotation.write().equals(Field.Write.ALWAYS) ) { - return false; - } - return true; - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java index 7af678541b..53a501e068 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/CachingMongoPersistentProperty.java @@ -33,6 +33,7 @@ public class CachingMongoPersistentProperty extends BasicMongoPersistentProperty private boolean dbRefResolved; private @Nullable DBRef dbref; private @Nullable String fieldName; + private @Nullable Boolean writeNullValues; private @Nullable Class fieldType; private @Nullable Boolean usePropertyAccess; private @Nullable Boolean isTransient; 
@@ -90,6 +91,20 @@ public String getFieldName() { return this.fieldName; } + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#skipNullValues() + */ + @Override + public boolean writeNullValues() { + + if (this.writeNullValues == null) { + this.writeNullValues = super.writeNullValues(); + } + + return this.writeNullValues; + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.mapping.BasicMongoPersistentProperty#getFieldType() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java index b2365f8d6c..b606cf15b9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Field.java @@ -28,27 +28,13 @@ * * @author Oliver Gierke * @author Christoph Strobl + * @author Divya Srivastava */ @Documented @Retention(RetentionPolicy.RUNTIME) @Target({ ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE }) public @interface Field { - /** - * Enumeration of write strategies for a field with null value.It decides whether a field with null value has to be - * written to the resulting document to be saved to the database. - */ - enum Write{ - /* - * The field will always be written to the database irrespective of null value. - */ - ALWAYS, - /* - * The field will only be written to the database if it has a non null value. - */ - NON_NULL - } - /** * The key to be used to store the field inside the document. Alias for {@link #name()}. * @@ -82,12 +68,32 @@ enum Write{ FieldType targetType() default FieldType.IMPLICIT; /** - * If set to {@link Write#NON_NULL} {@literal null} values will be omitted. 
- * Setting the value to {@link Write#ALWAYS} explicitly adds an entry for the given field - * holding {@literal null} as a value {@code 'fieldName' : null }. + * Write rules when to include a property value upon conversion. If set to {@link Write#NON_NULL} (default) + * {@literal null} values are not written to the target {@code Document}. Setting the value to {@link Write#ALWAYS} + * explicitly adds an entry for the given field holding {@literal null} as a value {@code 'fieldName' : null }. *

        - * NOTE Setting the value to {@link Write#ALWAYS} may lead to increased document size. + * NOTESetting the value to {@link Write#ALWAYS} may lead to increased document size. + * * @return {@link Write#NON_NULL} by default. + * @since 3.3 */ Write write() default Write.NON_NULL; + + /** + * Enumeration of write strategies to define when a property is included for write conversion. + * + * @since 3.3 + */ + enum Write { + + /** + * Value that indicates that property is to be always included, independent of value of the property. + */ + ALWAYS, + + /** + * Value that indicates that only properties with non-{@literal null} values are to be included. + */ + NON_NULL + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java index 10f35435d2..2bd387d74c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java @@ -28,6 +28,7 @@ * @author Patryk Wasik * @author Thomas Darimont * @author Christoph Strobl + * @author Divya Srivastava */ public interface MongoPersistentProperty extends PersistentProperty { @@ -54,6 +55,15 @@ public interface MongoPersistentProperty extends PersistentProperty - * It's annotated with {@link Field.Write}. - * - * @return - * @since 1.6 - */ - boolean isPropertyOmittableOnNull(); - - /** - * Returns whether the property is to be written to the document if the value is null
        - * It's annotated with {@link omitNull}. - * - * @return - * @since 1.6 - */ - boolean isOmitNullProperty(); /** * Returns the {@link DBRef} if the property is a reference. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index 3d7de5d094..6d4b163d22 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -63,6 +63,11 @@ public int getFieldOrder() { return delegate.getFieldOrder(); } + @Override + public boolean writeNullValues() { + return delegate.writeNullValues(); + } + @Override public boolean isDbReference() { return delegate.isDbReference(); @@ -92,11 +97,6 @@ public boolean isExplicitLanguageProperty() { public boolean isTextScoreProperty() { return delegate.isTextScoreProperty(); } - - @Override - public boolean isOmitNullProperty() { - return delegate.isOmitNullProperty(); - } @Override @Nullable @@ -321,8 +321,4 @@ public PersistentPropertyAccessor getAccessorForOwner(T owner) { return delegate.getAccessorForOwner(owner); } - @Override - public boolean isPropertyOmittableOnNull() { - return delegate.isPropertyOmittableOnNull(); - } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index b18e1066f5..f5521008f8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -24,6 +24,7 @@ import lombok.Data; import lombok.EqualsAndHashCode; import 
lombok.NoArgsConstructor; +import lombok.RequiredArgsConstructor; import lombok.Value; import lombok.With; @@ -1692,7 +1693,7 @@ public void countAndFindWithoutTypeInformation() { assertThat(template.count(query, collectionName)).isEqualTo(1L); } - @Test // DATAMONGO-571 + @Test // DATAMONGO-571, GH-3407 public void nullsPropertiesForVersionObjectUpdates() { VersionedPerson person = new VersionedPerson(); @@ -1702,11 +1703,17 @@ public void nullsPropertiesForVersionObjectUpdates() { template.save(person); assertThat(person.id).isNotNull(); + person.firstname = null; person.lastname = null; template.save(person); person = template.findOne(query(where("id").is(person.id)), VersionedPerson.class); + assertThat(person.firstname).isNull(); assertThat(person.lastname).isNull(); + + org.bson.Document document = template.findOne(query(where("_id").is(person.id)), org.bson.Document.class, + "versionedPerson"); + assertThat(document).doesNotContainKey("firstname").containsEntry("lastname", null); } @Test // DATAMONGO-571 @@ -3703,6 +3710,64 @@ public void sortOnIdFieldWithExplicitTypeShouldWork() { assertThat(template.find(new BasicQuery("{}").with(Sort.by("id")), WithIdAndFieldAnnotation.class)).isNotEmpty(); } + @Test // GH-3407 + void shouldWriteSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + doc.subdocument = subdoc; + + template.save(doc); + + org.bson.Document loaded = template.findById(doc.id, org.bson.Document.class, "withSubdocument"); + + assertThat(loaded.get("subdocument", org.bson.Document.class)).hasSize(3).containsEntry("firstname", "Walter") + .containsEntry("nickname", null); + } + + @Test // GH-3407 + void shouldUpdateSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = 
new SubdocumentWithWriteNull("Walter", "White"); + subdoc.nickname = "Heisenberg"; + doc.subdocument = subdoc; + + template.save(doc); + + String id = doc.id; + + doc.id = null; + subdoc.nickname = null; + template.update(WithSubdocument.class).replaceWith(doc).findAndReplaceValue(); + + org.bson.Document loaded = template.findById(id, org.bson.Document.class, "withSubdocument"); + + assertThat(loaded.get("subdocument", org.bson.Document.class)).hasSize(3).containsEntry("firstname", "Walter") + .containsEntry("nickname", null); + } + + @Test // GH-3407 + void shouldFindSubdocumentWithNullCorrectly() { + + template.dropCollection(WithSubdocument.class); + + WithSubdocument doc = new WithSubdocument(); + SubdocumentWithWriteNull subdoc = new SubdocumentWithWriteNull("Walter", "White"); + doc.subdocument = subdoc; + + template.save(doc); + + org.bson.Document loaded = template.findOne(query(where("subdocument").is(subdoc)), org.bson.Document.class, + "withSubdocument"); + + assertThat(loaded).isNotNull(); + } + private AtomicReference createAfterSaveReference() { AtomicReference saved = new AtomicReference<>(); @@ -4020,7 +4085,8 @@ public int hashCode() { static class VersionedPerson { @Version Long version; - String id, firstname, lastname; + String id, firstname; + @Field(write = Field.Write.ALWAYS) String lastname; } static class TypeWithFieldAnnotation { @@ -4247,4 +4313,22 @@ static class WithIdAndFieldAnnotation { String value; } + + @Data + static class WithSubdocument { + + @Id // + @Field(name = "_id") // + String id; + SubdocumentWithWriteNull subdocument; + } + + @Data + @RequiredArgsConstructor + static class SubdocumentWithWriteNull { + + final String firstname, lastname; + + @Field(write = Field.Write.ALWAYS) String nickname; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 
f8170889b6..c3ee9b32ff 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -1139,7 +1139,7 @@ void geoNearShouldMapGeoJsonPointCorrectly() { .containsEntry("$geoNear.near.coordinates.[1]", 2D); } - @Test // DATAMONGO-2155 + @Test // DATAMONGO-2155, GH-3407 void saveVersionedEntityShouldCallUpdateCorrectly() { when(updateResult.getModifiedCount()).thenReturn(1L); @@ -1157,7 +1157,7 @@ void saveVersionedEntityShouldCallUpdateCorrectly() { assertThat(queryCaptor.getValue()).isEqualTo(new Document("_id", 1).append("version", 10)); assertThat(updateCaptor.getValue()) - .isEqualTo(new Document("version", 11).append("_class", VersionedEntity.class.getName())); + .isEqualTo(new Document("version", 11).append("_class", VersionedEntity.class.getName()).append("name", null)); } @Test // DATAMONGO-1783 @@ -2273,6 +2273,8 @@ static class VersionedEntity { @Id Integer id; @Version Integer version; + + @Field(write = Field.Write.ALWAYS) String name; } enum MyConverter implements Converter { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 2009b5314b..0361571414 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -2520,6 +2520,18 @@ void usesCustomConverterForPropertiesUsingTypesImplementingMapOnRead() { assertThat(target.typeImplementingMap).isEqualTo(new TypeImplementingMap("one", 2)); } + @Test // GH-3407 + void shouldWriteNullPropertyCorrectly() { + + WithFieldWrite fieldWrite = new WithFieldWrite(); + + 
org.bson.Document document = new org.bson.Document(); + converter.write(fieldWrite, document); + + assertThat(document).containsEntry("writeAlways", null).doesNotContainKey("writeNonNull"); + assertThat(document).containsEntry("writeAlwaysPerson", null).doesNotContainKey("writeNonNullPerson"); + } + static class GenericType { T content; } @@ -3165,4 +3177,20 @@ public Set> entrySet() { return null; } } + + static class WithFieldWrite { + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Integer writeNonNull; + + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Integer writeAlways; + + @org.springframework.data.mongodb.core.mapping.DBRef @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Person writeNonNullPerson; + + @org.springframework.data.mongodb.core.mapping.DBRef @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Person writeAlwaysPerson; + + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index 69f9cb6613..bbcb8dada0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -48,25 +48,26 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava */ public class BasicMongoPersistentPropertyUnitTests { - MongoPersistentEntity entity; + private MongoPersistentEntity entity; 
@BeforeEach - public void setup() { + void setup() { entity = new BasicMongoPersistentEntity<>(ClassTypeInformation.from(Person.class)); } @Test - public void usesAnnotatedFieldName() { + void usesAnnotatedFieldName() { Field field = ReflectionUtils.findField(Person.class, "firstname"); assertThat(getPropertyFor(field).getFieldName()).isEqualTo("foo"); } @Test - public void returns_IdForIdProperty() { + void returns_IdForIdProperty() { Field field = ReflectionUtils.findField(Person.class, "id"); MongoPersistentProperty property = getPropertyFor(field); assertThat(property.isIdProperty()).isTrue(); @@ -74,19 +75,19 @@ public void returns_IdForIdProperty() { } @Test - public void returnsPropertyNameForUnannotatedProperties() { + void returnsPropertyNameForUnannotatedProperties() { Field field = ReflectionUtils.findField(Person.class, "lastname"); assertThat(getPropertyFor(field).getFieldName()).isEqualTo("lastname"); } @Test - public void preventsNegativeOrder() { + void preventsNegativeOrder() { getPropertyFor(ReflectionUtils.findField(Person.class, "ssn")); } @Test // DATAMONGO-553 - public void usesPropertyAccessForThrowableCause() { + void usesPropertyAccessForThrowableCause() { BasicMongoPersistentEntity entity = new BasicMongoPersistentEntity<>( ClassTypeInformation.from(Throwable.class)); @@ -96,7 +97,7 @@ public void usesPropertyAccessForThrowableCause() { } @Test // DATAMONGO-607 - public void usesCustomFieldNamingStrategyByDefault() throws Exception { + void usesCustomFieldNamingStrategyByDefault() throws Exception { ClassTypeInformation type = ClassTypeInformation.from(Person.class); Field field = ReflectionUtils.findField(Person.class, "lastname"); @@ -113,7 +114,7 @@ public void usesCustomFieldNamingStrategyByDefault() throws Exception { } @Test // DATAMONGO-607 - public void rejectsInvalidValueReturnedByFieldNamingStrategy() { + void rejectsInvalidValueReturnedByFieldNamingStrategy() { ClassTypeInformation type = ClassTypeInformation.from(Person.class); 
Field field = ReflectionUtils.findField(Person.class, "lastname"); @@ -126,49 +127,42 @@ public void rejectsInvalidValueReturnedByFieldNamingStrategy() { } @Test // DATAMONGO-937 - public void shouldDetectAnnotatedLanguagePropertyCorrectly() { + void shouldDetectAnnotatedLanguagePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithLanguageProperty.class, "lang"); assertThat(property.isLanguageProperty()).isTrue(); } @Test // DATAMONGO-937 - public void shouldDetectImplicitLanguagePropertyCorrectly() { + void shouldDetectImplicitLanguagePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithImplicitLanguageProperty.class, "language"); assertThat(property.isLanguageProperty()).isTrue(); } @Test // DATAMONGO-976 - public void shouldDetectTextScorePropertyCorrectly() { + void shouldDetectTextScorePropertyCorrectly() { MongoPersistentProperty property = getPropertyFor(DocumentWithTextScoreProperty.class, "score"); assertThat(property.isTextScoreProperty()).isTrue(); } - @Test // DATAMONGO-2551 - public void shouldDetectOmittableOnNullPropertyCorrectly() { - - MongoPersistentProperty property = getPropertyFor(DocumentWithOmittableOnNullProperty.class, "write"); - assertThat(property.isPropertyOmittableOnNull()).isTrue(); - } - @Test // DATAMONGO-976 - public void shouldDetectTextScoreAsReadOnlyProperty() { + void shouldDetectTextScoreAsReadOnlyProperty() { MongoPersistentProperty property = getPropertyFor(DocumentWithTextScoreProperty.class, "score"); assertThat(property.isWritable()).isFalse(); } @Test // DATAMONGO-1050 - public void shouldNotConsiderExplicitlyNameFieldAsIdProperty() { + void shouldNotConsiderExplicitlyNameFieldAsIdProperty() { MongoPersistentProperty property = getPropertyFor(DocumentWithExplicitlyRenamedIdProperty.class, "id"); assertThat(property.isIdProperty()).isFalse(); } @Test // DATAMONGO-1050 - public void 
shouldConsiderPropertyAsIdWhenExplicitlyAnnotatedWithIdEvenWhenExplicitlyNamePresent() { + void shouldConsiderPropertyAsIdWhenExplicitlyAnnotatedWithIdEvenWhenExplicitlyNamePresent() { MongoPersistentProperty property = getPropertyFor(DocumentWithExplicitlyRenamedIdPropertyHavingIdAnnotation.class, "id"); @@ -176,7 +170,7 @@ public void shouldConsiderPropertyAsIdWhenExplicitlyAnnotatedWithIdEvenWhenExpli } @Test // DATAMONGO-1373 - public void shouldConsiderComposedAnnotationsForIdField() { + void shouldConsiderComposedAnnotationsForIdField() { MongoPersistentProperty property = getPropertyFor(DocumentWithComposedAnnotations.class, "myId"); assertThat(property.isIdProperty()).isTrue(); @@ -184,14 +178,14 @@ public void shouldConsiderComposedAnnotationsForIdField() { } @Test // DATAMONGO-1373 - public void shouldConsiderComposedAnnotationsForFields() { + void shouldConsiderComposedAnnotationsForFields() { MongoPersistentProperty property = getPropertyFor(DocumentWithComposedAnnotations.class, "myField"); assertThat(property.getFieldName()).isEqualTo("myField"); } @Test // DATAMONGO-1737 - public void honorsFieldOrderWhenIteratingOverProperties() { + void honorsFieldOrderWhenIteratingOverProperties() { MongoMappingContext context = new MongoMappingContext(); MongoPersistentEntity entity = context.getPersistentEntity(Sample.class); @@ -203,36 +197,45 @@ public void honorsFieldOrderWhenIteratingOverProperties() { assertThat(properties).containsExactly("first", "second", "third"); } + @Test // GH-3407 + void shouldDetectWritability() { + + assertThat(getPropertyFor(WithFieldWrite.class, "fieldWithDefaults").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "fieldWithField").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "writeNonNull").writeNullValues()).isFalse(); + assertThat(getPropertyFor(WithFieldWrite.class, "writeAlways").writeNullValues()).isTrue(); + } + @Test // DATAMONGO-1798 - public void 
fieldTypeShouldReturnActualTypeForNonIdProperties() { + void fieldTypeShouldReturnActualTypeForNonIdProperties() { MongoPersistentProperty property = getPropertyFor(Person.class, "lastname"); assertThat(property.getFieldType()).isEqualTo(String.class); } @Test // DATAMONGO-1798 - public void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithCommonsId() { + void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithCommonsId() { MongoPersistentProperty property = getPropertyFor(Person.class, "id"); assertThat(property.getFieldType()).isEqualTo(ObjectId.class); } @Test // DATAMONGO-1798 - public void fieldTypeShouldBeImplicitForPropertiesAnnotatedWithMongoId() { + void fieldTypeShouldBeImplicitForPropertiesAnnotatedWithMongoId() { MongoPersistentProperty property = getPropertyFor(WithStringMongoId.class, "id"); assertThat(property.getFieldType()).isEqualTo(String.class); } @Test // DATAMONGO-1798 - public void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithMongoIdAndTargetTypeObjectId() { + void fieldTypeShouldBeObjectIdForPropertiesAnnotatedWithMongoIdAndTargetTypeObjectId() { MongoPersistentProperty property = getPropertyFor(WithStringMongoIdMappedToObjectId.class, "id"); assertThat(property.getFieldType()).isEqualTo(ObjectId.class); } @Test // DATAMONGO-2460 - public void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTypeImplicit() { + void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTypeImplicit() { MongoPersistentProperty property = getPropertyFor(WithComplexId.class, "id"); assertThat(property.getFieldType()).isEqualTo(Document.class); @@ -304,9 +307,15 @@ static class DocumentWithTextScoreProperty { @TextScore Float score; } - static class DocumentWithOmittableOnNullProperty { + static class WithFieldWrite { + + int fieldWithDefaults; + @org.springframework.data.mongodb.core.mapping.Field int fieldWithField; + @org.springframework.data.mongodb.core.mapping.Field( + write = 
org.springframework.data.mongodb.core.mapping.Field.Write.NON_NULL) Integer writeNonNull; - @org.springframework.data.mongodb.core.mapping.Field("write") org.springframework.data.mongodb.core.mapping.Field.Write write; + @org.springframework.data.mongodb.core.mapping.Field( + write = org.springframework.data.mongodb.core.mapping.Field.Write.ALWAYS) Integer writeAlways; } diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index eac49f37bc..a74594bff0 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -5,6 +5,7 @@ == What's New in Spring Data MongoDB 3.3 * Extended support for <> entities. +* Include/exclude `null` properties on write to `Document` through `@Field(write=…)`. [[new-features.3.2]] == What's New in Spring Data MongoDB 3.2 From 98fe043b95c524ec5b6603b34268b474dd9107b3 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 17 Jun 2021 09:41:46 +0200 Subject: [PATCH 029/983] Directly import JSR305 jar. Closes #3672 --- spring-data-mongodb/pom.xml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index c1efaea420..1f157e75bc 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -87,6 +87,13 @@ true + + com.google.code.findbugs + jsr305 + 3.0.2 + true + + From 3872b379cd9d88cc62b76ddae0d2f45219445f66 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Apr 2021 06:09:09 +0200 Subject: [PATCH 030/983] Fix $or / $nor keyword mapping in query mapper. This commit fixes an issue with the pattern used for detecting $or / $nor which also matched other keywords like $floor. Closes: #3635 Original pull request: #3637. 
--- .../data/mongodb/core/convert/QueryMapper.java | 3 +-- .../data/mongodb/core/convert/QueryMapperUnitTests.java | 8 ++++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 5840815562..0127216e5c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -797,7 +797,6 @@ private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Ob */ static class Keyword { - private static final String N_OR_PATTERN = "\\$.*or"; private static final Set NON_DBREF_CONVERTING_KEYWORDS = new HashSet<>( Arrays.asList("$", "$size", "$slice", "$gt", "$lt")); @@ -828,7 +827,7 @@ public boolean isExists() { } public boolean isOrOrNor() { - return key.matches(N_OR_PATTERN); + return key.equalsIgnoreCase("$or") || key.equalsIgnoreCase("$nor"); } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index dd8dd25e8f..166c602be9 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1277,6 +1277,14 @@ void mapsNullBsonTypeForFieldWithCustomTargetType() { assertThat(document).isEqualTo(new org.bson.Document("stringAsOid", new org.bson.Document("$type", 10))); } + @Test // GH-3635 + void $floorKeywordDoesNotMatch$or$norPattern() { + + Query query = new BasicQuery(" { $expr: { $gt: [ \"$spent\" , { $floor : \"$budget\" } ] } }"); + assertThatNoException() + 
.isThrownBy(() -> mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class))); + } + class WithDeepArrayNesting { List level0; From a1c165921d578598a6209e76b02961529cfd0170 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 31 May 2021 10:22:03 +0200 Subject: [PATCH 031/983] Fix query mapper path resolution for types considered simple ones. spring-projects/spring-data-commons#2293 changed how PersistentProperty paths get resolved and considers potentially registered converters for those, which made the path resolution fail in during the query mapping process. This commit makes sure to capture the according exception and continue with the given user input. Fixes: #3659 Original pull request: #3661. --- .../mongodb/core/convert/QueryMapper.java | 59 +++++++++++++------ .../core/convert/QueryMapperUnitTests.java | 51 +++++++++++++--- 2 files changed, 83 insertions(+), 27 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 0127216e5c..b619779590 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -19,11 +19,14 @@ import java.util.Map.Entry; import java.util.regex.Matcher; import java.util.regex.Pattern; +import java.util.stream.Collectors; import org.bson.BsonValue; import org.bson.Document; import org.bson.conversions.Bson; import org.bson.types.ObjectId; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Reference; @@ -69,6 +72,8 @@ */ public class QueryMapper { + protected static final Logger LOGGER = 
LoggerFactory.getLogger(QueryMapper.class); + private static final List DEFAULT_ID_NAMES = Arrays.asList("id", "_id"); private static final Document META_TEXT_SCORE = new Document("$meta", "textScore"); static final ClassTypeInformation NESTED_DOCUMENT = ClassTypeInformation.from(NestedDocument.class); @@ -677,7 +682,8 @@ private Object createReferenceFor(Object source, MongoPersistentProperty propert return (DBRef) source; } - if(property != null && (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) { + if (property != null && (property.isDocumentReference() + || (!property.isDbReference() && property.findAnnotation(Reference.class) != null))) { return converter.toDocumentPointer(source, property).getPointer(); } @@ -1178,8 +1184,8 @@ private PersistentPropertyPath getPath(String pathExpre removePlaceholders(DOT_POSITIONAL_PATTERN, pathExpression)); if (sourceProperty != null && sourceProperty.getOwner().equals(entity)) { - return mappingContext - .getPersistentPropertyPath(PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation())); + return mappingContext.getPersistentPropertyPath( + PropertyPath.from(Pattern.quote(sourceProperty.getName()), entity.getTypeInformation())); } PropertyPath path = forName(rawPath); @@ -1187,29 +1193,46 @@ private PersistentPropertyPath getPath(String pathExpre return null; } - try { + PersistentPropertyPath propertyPath = tryToResolvePersistentPropertyPath(path); - PersistentPropertyPath propertyPath = mappingContext.getPersistentPropertyPath(path); + if (propertyPath == null) { - Iterator iterator = propertyPath.iterator(); - boolean associationDetected = false; + if (QueryMapper.LOGGER.isInfoEnabled()) { + + String types = StringUtils.collectionToDelimitedString( + path.stream().map(it -> it.getType().getSimpleName()).collect(Collectors.toList()), " -> "); + QueryMapper.LOGGER.info( + "Could not map '{}'. 
Maybe a fragment in '{}' is considered a simple type. Mapper continues with {}.", + path, types, pathExpression); + } + return null; + } - while (iterator.hasNext()) { + Iterator iterator = propertyPath.iterator(); + boolean associationDetected = false; - MongoPersistentProperty property = iterator.next(); + while (iterator.hasNext()) { - if (property.isAssociation()) { - associationDetected = true; - continue; - } + MongoPersistentProperty property = iterator.next(); - if (associationDetected && !property.isIdProperty()) { - throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression)); - } + if (property.isAssociation()) { + associationDetected = true; + continue; } - return propertyPath; - } catch (InvalidPersistentPropertyPath e) { + if (associationDetected && !property.isIdProperty()) { + throw new MappingException(String.format(INVALID_ASSOCIATION_REFERENCE, pathExpression)); + } + } + + return propertyPath; + } + + private PersistentPropertyPath tryToResolvePersistentPropertyPath(PropertyPath path) { + + try { + return mappingContext.getPersistentPropertyPath(path); + } catch (MappingException e) { return null; } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 166c602be9..e2a65d5322 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -28,6 +28,7 @@ import java.util.Map; import java.util.Optional; +import lombok.Data; import org.bson.conversions.Bson; import org.bson.types.Code; import org.bson.types.ObjectId; @@ -35,9 +36,10 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.junit.jupiter.MockitoExtension; - 
+import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.Transient; +import org.springframework.data.convert.WritingConverter; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.geo.Point; @@ -52,6 +54,7 @@ import org.springframework.data.mongodb.core.mapping.FieldType; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.TextScore; import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.BasicQuery; @@ -1285,6 +1288,26 @@ void mapsNullBsonTypeForFieldWithCustomTargetType() { .isThrownBy(() -> mapper.getMappedObject(query.getQueryObject(), context.getPersistentEntity(Foo.class))); } + @Test // GH-3659 + void allowsUsingFieldPathsForPropertiesHavingCustomConversionRegistered() { + + Query query = query(where("address.street").is("1007 Mountain Drive")); + + MongoCustomConversions mongoCustomConversions = new MongoCustomConversions(Collections.singletonList(new MyAddressToDocumentConverter())); + + this.context = new MongoMappingContext(); + this.context.setSimpleTypeHolder(mongoCustomConversions.getSimpleTypeHolder()); + this.context.afterPropertiesSet(); + + this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.setCustomConversions(mongoCustomConversions); + this.converter.afterPropertiesSet(); + + this.mapper = new QueryMapper(converter); + + assertThat(mapper.getMappedSort(query.getQueryObject(), context.getPersistentEntity(Customer.class))).isEqualTo(new org.bson.Document("address.street", "1007 Mountain Drive")); + } + class WithDeepArrayNesting { List level0; @@ 
-1516,17 +1539,27 @@ static class WithPropertyUsingUnderscoreInName { String renamed_fieldname_with_underscores; } - static class WithDocumentReferences { + @Document + static class Customer { - @DocumentReference - Sample sample; + @Id + private ObjectId id; + private String name; + private MyAddress address; + } - @DocumentReference - SimpeEntityWithoutId noId; + static class MyAddress { + private String street; + } - @DocumentReference(lookup = "{ 'stringProperty' : ?#{stringProperty} }") - SimpeEntityWithoutId noIdButLookupQuery; + @WritingConverter + public static class MyAddressToDocumentConverter implements Converter { + @Override + public org.bson.Document convert(MyAddress address) { + org.bson.Document doc = new org.bson.Document(); + doc.put("street", address.street); + return doc; + } } - } From 73a0f0493358dae7040ff3613524ca1450e2a585 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 1 Jun 2021 09:25:15 +0200 Subject: [PATCH 032/983] Polishing. Fix typo in class name and make sure MongoTestTemplate uses the configured simple types. Remove superfluous junit extension. See: #3659 Original pull request: #3661. 
--- .../data/mongodb/core/convert/QueryMapper.java | 1 + .../mongodb/core/convert/QueryMapperUnitTests.java | 10 +++------- .../test/util/MongoTestTemplateConfiguration.java | 3 +++ 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index b619779590..0392dc5426 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -1228,6 +1228,7 @@ private PersistentPropertyPath getPath(String pathExpre return propertyPath; } + @Nullable private PersistentPropertyPath tryToResolvePersistentPropertyPath(PropertyPath path) { try { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index e2a65d5322..770f8ebe20 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -28,7 +28,6 @@ import java.util.Map; import java.util.Optional; -import lombok.Data; import org.bson.conversions.Bson; import org.bson.types.Code; import org.bson.types.ObjectId; @@ -49,12 +48,10 @@ import org.springframework.data.mongodb.core.geo.GeoJsonPolygon; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.FieldType; import 
org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.TextScore; import org.springframework.data.mongodb.core.mapping.Unwrapped; import org.springframework.data.mongodb.core.query.BasicQuery; @@ -75,7 +72,6 @@ * @author Christoph Strobl * @author Mark Paluch */ -@ExtendWith(MockitoExtension.class) public class QueryMapperUnitTests { private QueryMapper mapper; @@ -1447,18 +1443,18 @@ static class ClassWithGeoTypes { @Field("geoJsonPointWithNameViaFieldAnnotation") GeoJsonPoint namedGeoJsonPoint; } - static class SimpeEntityWithoutId { + static class SimpleEntityWithoutId { String stringProperty; Integer integerProperty; } static class EntityWithComplexValueTypeMap { - Map map; + Map map; } static class EntityWithComplexValueTypeList { - List list; + List list; } static class WithExplicitTargetTypes { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java index 0f90bd2b9c..2d2dedc2ee 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java @@ -119,6 +119,9 @@ MongoMappingContext mappingContext() { mappingContext = new MongoMappingContext(); mappingContext.setInitialEntitySet(mappingContextConfigurer.initialEntitySet()); mappingContext.setAutoIndexCreation(mappingContextConfigurer.autocreateIndex); + if(mongoConverterConfigurer.customConversions != null) { + mappingContext.setSimpleTypeHolder(mongoConverterConfigurer.customConversions.getSimpleTypeHolder()); + } 
mappingContext.afterPropertiesSet(); } From 7dfe4604336ab79a1ee7cde721254be6a5e41906 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 21 Jun 2021 12:24:02 +0200 Subject: [PATCH 033/983] Fix field projection value conversion. The field projection conversion should actually only map field names and avoid value conversion. In the MongoId case an inclusion parameter (1) was unintentionally converted into its String representation which causes trouble on Mongo 4.4 servers. Fixes: #3668 Original pull request: #3678. --- .../mongodb/core/convert/QueryMapper.java | 37 +++++++++++-------- .../core/convert/QueryMapperUnitTests.java | 14 +++++++ 2 files changed, 35 insertions(+), 16 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 0392dc5426..08f6458e95 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -195,19 +195,7 @@ public Document getMappedSort(Document sortObject, @Nullable MongoPersistentEnti return new Document(); } - sortObject = filterUnwrappedObjects(sortObject, entity); - - Document mappedSort = new Document(); - for (Map.Entry entry : BsonUtils.asMap(sortObject).entrySet()) { - - Field field = createPropertyField(entity, entry.getKey(), mappingContext); - if (field.getProperty() != null && field.getProperty().isUnwrapped()) { - continue; - } - - mappedSort.put(field.getMappedKey(), entry.getValue()); - } - + Document mappedSort = mapFieldsToPropertyNames(sortObject, entity); mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); return mappedSort; } @@ -225,13 +213,30 @@ public Document getMappedFields(Document fieldsObject, @Nullable MongoPersistent Assert.notNull(fieldsObject, "FieldsObject must not be 
null!"); - fieldsObject = filterUnwrappedObjects(fieldsObject, entity); - - Document mappedFields = getMappedObject(fieldsObject, entity); + Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity); mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); return mappedFields; } + private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity entity) { + + if (fields.isEmpty()) { + return new Document(); + + } + Document target = new Document(); + for (Map.Entry entry : BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).entrySet()) { + + Field field = createPropertyField(entity, entry.getKey(), mappingContext); + if (field.getProperty() != null && field.getProperty().isUnwrapped()) { + continue; + } + + target.put(field.getMappedKey(), entry.getValue()); + } + return target; + } + private void mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, MetaMapping metaMapping) { if (entity == null) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 770f8ebe20..ba883d14c4 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -50,6 +50,7 @@ import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.TextScore; @@ -1304,6 +1305,13 @@ void 
allowsUsingFieldPathsForPropertiesHavingCustomConversionRegistered() { assertThat(mapper.getMappedSort(query.getQueryObject(), context.getPersistentEntity(Customer.class))).isEqualTo(new org.bson.Document("address.street", "1007 Mountain Drive")); } + @Test // GH-3668 + void mapStringIdFieldProjection() { + + org.bson.Document mappedFields = mapper.getMappedFields(new org.bson.Document("id", 1), context.getPersistentEntity(WithStringId.class)); + assertThat(mappedFields).containsEntry("_id", 1); + } + class WithDeepArrayNesting { List level0; @@ -1367,6 +1375,12 @@ class Sample { @Id private String foo; } + class WithStringId { + + @MongoId String id; + String name; + } + class BigIntegerId { @Id private BigInteger id; From 9dda0a2f9374b5f6f7d72f9a06a06cfd4f7204e1 Mon Sep 17 00:00:00 2001 From: larsw Date: Mon, 21 Jun 2021 13:58:07 +0200 Subject: [PATCH 034/983] Add closing quote to GeoJson javadoc. Closes #3677 --- .../java/org/springframework/data/mongodb/core/geo/GeoJson.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java index 3c659f7cc1..b033d2b6d5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJson.java @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core.geo; /** - * Interface definition for structures defined in GeoJSON format. * * @author Christoph Strobl * @since 1.7 From 826015e9c1e749c51d23dddfff8bd4b5187145bf Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 22 Jun 2021 14:36:49 +0200 Subject: [PATCH 035/983] Update reference docs to use correct MongoClient. 
Closes #3666 --- src/main/asciidoc/reference/mongodb.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index 7e0ef51328..fb35bb655b 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -214,7 +214,7 @@ public class AppConfig { ---- ==== -To access the `com.mongodb.client.MongoClient` object created by the `MongoClientFactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired Mongo mongo;` field. +To access the `com.mongodb.client.MongoClient` object created by the `MongoClientFactoryBean` in other `@Configuration` classes or your own classes, use a `private @Autowired MongoClient mongoClient;` field. [[mongo.mongo-xml-config]] === Registering a Mongo Instance by Using XML-based Metadata From 2a5ae0da37262762965c176cf9b7016606730ce4 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 22 Jun 2021 15:17:49 +0200 Subject: [PATCH 036/983] Updated changelog. See #3649 --- src/main/resources/changelog.txt | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index 3de91d4a95..e611a3a5f8 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,15 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.1.10 (2021-06-22) +-------------------------------------- +* #3677 - Add missing double quote to GeoJson.java JSDoc header. +* #3666 - Documentation references outdated `Mongo` client. +* #3659 - [3.2.1] Indexing Class with Custom Converter -> Couldn't find PersistentEntity for property private [...]. +* #3635 - $floor isOrOrNor() return true. +* #3633 - NPE in QueryMapper when use Query with `null` as value. 
+ + Changes in version 3.2.1 (2021-05-14) ------------------------------------- * #3638 - Introduce template method for easier customization of fragments. @@ -3442,5 +3451,6 @@ Repository + From c70c29b2c7443296a6cb80db7c29cbdc188a46c6 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 22 Jun 2021 15:51:33 +0200 Subject: [PATCH 037/983] Updated changelog. See #3650 --- src/main/resources/changelog.txt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index e611a3a5f8..18ac26a430 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,17 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.2.2 (2021-06-22) +------------------------------------- +* #3677 - Add missing double quote to GeoJson.java JSDoc header. +* #3668 - Projection on the _id field returns wrong result when using `@MongoId` (MongoDB 4.4). +* #3666 - Documentation references outdated `Mongo` client. +* #3660 - MappingMongoConverter problem: ConversionContext#convert does not try to use custom converters first. +* #3659 - [3.2.1] Indexing Class with Custom Converter -> Couldn't find PersistentEntity for property private [...]. +* #3635 - $floor isOrOrNor() return true. +* #3633 - NPE in QueryMapper when use Query with `null` as value. + + Changes in version 3.1.10 (2021-06-22) -------------------------------------- * #3677 - Add missing double quote to GeoJson.java JSDoc header. @@ -3452,5 +3463,6 @@ Repository + From 85a30ec91597a451c4c1c04b41a1e36995e1a727 Mon Sep 17 00:00:00 2001 From: Gatto Date: Wed, 23 Jun 2021 21:07:29 -0300 Subject: [PATCH 038/983] Add equals and hashCode to UnwrappedMongoPersistentProperty. 
Fixes #3683 Original Pull Request: #3684 --- .../core/mapping/UnwrapEntityContext.java | 33 +++++++++++++++++++ .../UnwrappedMongoPersistentProperty.java | 31 +++++++++++++++++ 2 files changed, 64 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java index f4f54e9578..81e2926058 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java @@ -15,8 +15,11 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Objects; + /** * @author Christoph Strobl + * @author Rogério Meneguelli Gatto * @since 3.2 */ class UnwrapEntityContext { @@ -30,4 +33,34 @@ public UnwrapEntityContext(MongoPersistentProperty property) { public MongoPersistentProperty getProperty() { return property; } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + return Objects.hash(property); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnwrapEntityContext other = (UnwrapEntityContext) obj; + + return Objects.equals(property, other.property); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index 6d4b163d22..0570a582b1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -18,6 +18,7 @@ import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.util.Objects; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.PersistentEntity; @@ -29,6 +30,7 @@ * Unwrapped variant of {@link MongoPersistentProperty}. * * @author Christoph Strobl + * @author Rogério Meneguelli Gatto * @since 3.2 * @see Unwrapped */ @@ -321,4 +323,33 @@ public PersistentPropertyAccessor getAccessorForOwner(T owner) { return delegate.getAccessorForOwner(owner); } + /* + * (non-Javadoc) + * + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + return Objects.hash(delegate, context); + } + + /* + * (non-Javadoc) + * + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + UnwrappedMongoPersistentProperty other = (UnwrappedMongoPersistentProperty) obj; + + return Objects.equals(delegate, other.delegate) && Objects.equals(context, other.context); + } } From 82d67c1dbbab41c2b58ffa624a1a6925ba1b1091 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 24 Jun 2021 13:22:38 +0200 Subject: [PATCH 039/983] Favor ObjectUtils over Objects for equals/hashCode. 
Original Pull Request: #3684 --- .../core/mapping/UnwrapEntityContext.java | 26 +++++++-------- .../UnwrappedMongoPersistentProperty.java | 32 +++++++++++-------- 2 files changed, 30 insertions(+), 28 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java index 81e2926058..5240aef5c4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrapEntityContext.java @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.core.mapping; -import java.util.Objects; +import org.springframework.util.ObjectUtils; /** * @author Christoph Strobl @@ -36,21 +36,11 @@ public MongoPersistentProperty getProperty() { /* * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - return Objects.hash(property); - } - - /* - * (non-Javadoc) - * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { + if (this == obj) { return true; } @@ -59,8 +49,16 @@ public boolean equals(Object obj) { return false; } - UnwrapEntityContext other = (UnwrapEntityContext) obj; + UnwrapEntityContext that = (UnwrapEntityContext) obj; + return ObjectUtils.nullSafeEquals(property, that.property); + } - return Objects.equals(property, other.property); + /* + * (non-Javadoc) + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + return ObjectUtils.nullSafeHashCode(property); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index 0570a582b1..8f24bab61b 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -18,13 +18,13 @@ import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; -import java.util.Objects; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; /** * Unwrapped variant of {@link MongoPersistentProperty}. @@ -325,21 +325,11 @@ public PersistentPropertyAccessor getAccessorForOwner(T owner) { /* * (non-Javadoc) - * - * @see java.lang.Object#hashCode() - */ - @Override - public int hashCode() { - return Objects.hash(delegate, context); - } - - /* - * (non-Javadoc) - * * @see java.lang.Object#equals(java.lang.Object) */ @Override public boolean equals(Object obj) { + if (this == obj) { return true; } @@ -348,8 +338,22 @@ public boolean equals(Object obj) { return false; } - UnwrappedMongoPersistentProperty other = (UnwrappedMongoPersistentProperty) obj; + UnwrappedMongoPersistentProperty that = (UnwrappedMongoPersistentProperty) obj; + if (!ObjectUtils.nullSafeEquals(delegate, that.delegate)) { + return false; + } + return ObjectUtils.nullSafeEquals(context, that.context); + } + + /* + * (non-Javadoc) + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { - return Objects.equals(delegate, other.delegate) && Objects.equals(context, other.context); + int result = ObjectUtils.nullSafeHashCode(delegate); + result = 31 * result + ObjectUtils.nullSafeHashCode(context); + return result; } } From 61d3a0bd1f8e51490dc40e7ff679a4e5b605c7b7 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: 
Thu, 1 Jul 2021 08:50:24 +0200 Subject: [PATCH 040/983] Fix NPE when reading/mapping null value inside collection. Closes: #3686 --- .../core/convert/MappingMongoConverter.java | 3 +- .../core/convert/ReferenceLookupDelegate.java | 3 +- .../MappingMongoConverterUnitTests.java | 39 +++++++++++++++++++ 3 files changed, 43 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 732e8c9a51..aced009cda 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -1269,7 +1269,7 @@ protected Object readCollectionOrArray(ConversionContext context, Collection } for (Object element : source) { - items.add(context.convert(element, componentType)); + items.add(element != null ? 
context.convert(element, componentType) : element); } return getPotentiallyConvertedSimpleRead(items, targetType.getType()); @@ -2013,6 +2013,7 @@ protected static class ConversionContext { @SuppressWarnings("unchecked") public S convert(Object source, TypeInformation typeHint) { + Assert.notNull(source, "Source must not be null"); Assert.notNull(typeHint, "TypeInformation must not be null"); if (conversions.hasCustomReadTarget(source.getClass(), typeHint.getType())) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index 616abb325e..3ca730452f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -110,7 +110,8 @@ public Object readReference(MongoPersistentProperty property, Object value, Look return null; } - return entityReader.read(result.iterator().next(), property.getTypeInformation()); + Object resultValue = result.iterator().next(); + return resultValue != null ? 
entityReader.read(resultValue, property.getTypeInformation()) : null; } private ReferenceCollection computeReferenceContext(MongoPersistentProperty property, Object value, diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 0361571414..c8e2fec155 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -2532,6 +2532,41 @@ void shouldWriteNullPropertyCorrectly() { assertThat(document).containsEntry("writeAlwaysPerson", null).doesNotContainKey("writeNonNullPerson"); } + @Test // GH-3686 + void readsCollectionContainingNullValue() { + + org.bson.Document source = new org.bson.Document("items", Arrays.asList(new org.bson.Document("itemKey", "i1"), null, new org.bson.Document("itemKey", "i3"))); + + Order target = converter.read(Order.class, source); + + assertThat(target.items) + .map(it -> it != null ? 
it.itemKey : null) + .containsExactly("i1", null, "i3"); + } + + @Test // GH-3686 + void readsArrayContainingNullValue() { + + org.bson.Document source = new org.bson.Document("arrayOfStrings", Arrays.asList("i1", null, "i3")); + + WithArrays target = converter.read(WithArrays.class, source); + + assertThat(target.arrayOfStrings).containsExactly("i1", null, "i3"); + } + + @Test // GH-3686 + void readsMapContainingNullValue() { + + org.bson.Document source = new org.bson.Document("mapOfObjects", new org.bson.Document("item1", "i1").append("item2", null).append("item3", "i3")); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfObjects) + .containsEntry("item1", "i1") + .containsEntry("item2", null) + .containsEntry("item3", "i3"); + } + static class GenericType { T content; } @@ -2893,6 +2928,10 @@ static class WithArrayInConstructor { } + static class WithArrays { + String[] arrayOfStrings; + } + // DATAMONGO-1898 // DATACMNS-1278 From 5cffb3c07c3fa015479d032dbe7d6aec80a31808 Mon Sep 17 00:00:00 2001 From: David Julia Date: Sun, 27 Jun 2021 23:08:43 +1000 Subject: [PATCH 041/983] Fix Regression in generating queries with nested maps with numeric keys. While maps that have numeric keys work if there is only one map with an integer key, when there are multiple maps with numeric keys in a given query, it fails. Take the following example for a map called outer with numeric keys holding reference to another object with a map called inner with numeric keys: Updates that are meant to generate {"$set": {"outerMap.1234.inner.5678": "hello"}} are instead generating {"$set": {"outerMap.1234.inner.inner": "hello"}}, repeating the later map property name instead of using the integer key value. This commit adds unit tests both for the UpdateMapper and QueryMapper, which check multiple consecutive maps with numeric keys, and adds a fix in the KeyMapper. 
Because we cannot easily change the path parsing to somehow parse path parts corresponding to map keys differently, we address the issue in the KeyMapper. We keep track of the partial path corresponding to the current property and use it to skip adding the duplicated property name for the map to the query, and instead add the key. This is a bit redundant in that we now have both an iterator and an index-based way of accessing the path parts, but it gets the tests passing and fixes the issue without making a large change to the current approach. Fixes: #3688 Original Pull Request: #3689 --- .../mongodb/core/convert/QueryMapper.java | 22 ++++++++++++--- .../core/convert/QueryMapperUnitTests.java | 27 +++++++++++++++++++ .../core/convert/UpdateMapperUnitTests.java | 15 +++++++++++ 3 files changed, 61 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 08f6458e95..e1682fa6e9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -69,6 +69,7 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author David Julia */ public class QueryMapper { @@ -1367,11 +1368,17 @@ public TypeInformation getTypeHint() { static class KeyMapper { private final Iterator iterator; + private int currentIndex; + private String currentPropertyRoot; + private final List pathParts; public KeyMapper(String key, MappingContext, MongoPersistentProperty> mappingContext) { - this.iterator = Arrays.asList(key.split("\\.")).iterator(); + this.pathParts = Arrays.asList(key.split("\\.")); + this.currentPropertyRoot = pathParts.get(0); + this.currentIndex = 0; + this.iterator = pathParts.iterator(); this.iterator.next(); } @@ 
-1389,16 +1396,25 @@ protected String mapPropertyName(MongoPersistentProperty property) { while (inspect) { String partial = iterator.next(); + currentIndex++; - boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike(); + boolean isPositional = isPositionalParameter(partial) && property.isCollectionLike() ; + if(property.isMap() && currentPropertyRoot.equals(partial) && iterator.hasNext()){ + partial = iterator.next(); + currentIndex++; + } - if (isPositional || property.isMap()) { + if (isPositional || property.isMap() && !currentPropertyRoot.equals(partial)) { mappedName.append(".").append(partial); } inspect = isPositional && iterator.hasNext(); } + if(currentIndex + 1 < pathParts.size()) { + currentIndex++; + currentPropertyRoot = pathParts.get(currentIndex); + } return mappedName.toString(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index ba883d14c4..efd354b866 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -72,6 +72,7 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author David Julia */ public class QueryMapperUnitTests { @@ -730,6 +731,28 @@ void mappingShouldRetainNumericMapKey() { assertThat(document).containsKey("map.1.stringProperty"); } + @Test // GH-3688 + void mappingShouldRetainNestedNumericMapKeys() { + + Query query = query(where("outerMap.1.map.2.stringProperty").is("ba'alzamon")); + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map.2.stringProperty"); + } + + @Test // 
GH-3688 + void mappingShouldAllowSettingEntireNestedNumericKeyedMapValue() { + + Query query = query(where("outerMap.1.map").is(null)); //newEntityWithComplexValueTypeMap() + + org.bson.Document document = mapper.getMappedObject(query.getQueryObject(), + context.getPersistentEntity(EntityWithIntKeyedMapOfMap.class)); + + assertThat(document).containsKey("outerMap.1.map"); + } + @Test // DATAMONGO-1269 void mappingShouldRetainNumericPositionInList() { @@ -1467,6 +1490,10 @@ static class EntityWithComplexValueTypeMap { Map map; } + static class EntityWithIntKeyedMapOfMap{ + Map outerMap; + } + static class EntityWithComplexValueTypeList { List list; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java index a8d5f12b9f..bba9811e56 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java @@ -67,6 +67,7 @@ * @author Thomas Darimont * @author Mark Paluch * @author Pavel Vodrazka + * @author David Julia */ @ExtendWith(MockitoExtension.class) class UpdateMapperUnitTests { @@ -1179,6 +1180,16 @@ void numericKeyInMapOfNestedPath() { assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.601218778970110001827396.value\": \"testing\"}}"); } + @Test // GH-3688 + void multipleNumericKeysInNestedPath() { + + Update update = new Update().set("intKeyedMap.12345.map.0", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.12345.map.0\": \"testing\"}}"); + } + @Test // GH-3566 void mapsObjectClassPropertyFieldInMapValueTypeAsKey() { @@ -1425,6 +1436,10 @@ static class 
EntityWithObjectMap { Map concreteMap; } + static class EntityWithIntKeyedMap{ + Map intKeyedMap; + } + static class ClassWithEnum { Allocation allocation; From ef29e69a87022db0ca0e475dc4b276dccab0597d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 5 Jul 2021 10:28:39 +0200 Subject: [PATCH 042/983] Polishing. Simplify KeyMapper current property/index setup. Original Pull Request: #3689 --- .../data/mongodb/core/convert/QueryMapper.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index e1682fa6e9..7a14f07c4c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -1376,10 +1376,9 @@ public KeyMapper(String key, MappingContext, MongoPersistentProperty> mappingContext) { this.pathParts = Arrays.asList(key.split("\\.")); - this.currentPropertyRoot = pathParts.get(0); - this.currentIndex = 0; this.iterator = pathParts.iterator(); - this.iterator.next(); + this.currentPropertyRoot = iterator.next(); + this.currentIndex = 0; } /** @@ -1391,6 +1390,7 @@ public KeyMapper(String key, protected String mapPropertyName(MongoPersistentProperty property) { StringBuilder mappedName = new StringBuilder(PropertyToFieldNameConverter.INSTANCE.convert(property)); + boolean inspect = iterator.hasNext(); while (inspect) { From 4f65bb0810cceeaf1fbf783aecd13c3ebff20aab Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 22 Jun 2021 14:03:52 +0200 Subject: [PATCH 043/983] Fix mapping context setup to include simple type holder. Original pull request: #3687. 
Resolves #3656 --- .../data/mongodb/core/MongoTemplateUnitTests.java | 1 + .../data/mongodb/core/ReactiveMongoTemplateUnitTests.java | 2 ++ .../core/convert/DbRefMappingMongoConverterUnitTests.java | 1 + .../data/mongodb/core/convert/MappingMongoConverterTests.java | 2 ++ .../mongodb/core/convert/MappingMongoConverterUnitTests.java | 1 + .../data/mongodb/core/query/BasicQueryUnitTests.java | 4 ++++ .../mongodb/test/util/MongoTestTemplateConfiguration.java | 2 ++ 7 files changed, 13 insertions(+) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index c3ee9b32ff..68c83a2757 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -206,6 +206,7 @@ void beforeEach() { this.mappingContext = new MongoMappingContext(); mappingContext.setAutoIndexCreation(true); + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); mappingContext.afterPropertiesSet(); this.converter = spy(new MappingMongoConverter(new DefaultDbRefResolver(factory), mappingContext)); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index 931ea75cea..5c5a307f1d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -23,6 +23,7 @@ import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; +import 
org.springframework.data.mongodb.core.convert.MongoCustomConversions; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; @@ -189,6 +190,7 @@ void beforeEach() { when(aggregatePublisher.first()).thenReturn(findPublisher); this.mappingContext = new MongoMappingContext(); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); this.template = new ReactiveMongoTemplate(factory, converter); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index d5285e7d2e..b677d90df5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -85,6 +85,7 @@ void setUp() { this.dbRefResolver = spy(new DefaultDbRefResolver(dbFactory)); this.mappingContext = new MongoMappingContext(); + this.mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java index 2b17ed4b06..c9a4937125 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterTests.java @@ 
-31,6 +31,7 @@ import java.time.ZoneId; import java.time.temporal.ChronoUnit; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -80,6 +81,7 @@ void setUp() { dbRefResolver = spy(new DefaultDbRefResolver(factory)); mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); mappingContext.setInitialEntitySet(new HashSet<>( Arrays.asList(WithLazyDBRefAsConstructorArg.class, WithLazyDBRef.class, WithJavaTimeTypes.class))); mappingContext.setAutoIndexCreation(false); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index c8e2fec155..369f6dbdef 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -1702,6 +1702,7 @@ void convertsJava8DateTimeTypesToDateAndBack() { } @Test // DATAMONGO-1128 + @Disabled("really we should find a solution for this") void writesOptionalsCorrectly() { TypeWithOptional type = new TypeWithOptional(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java index 1a7477f099..d9ef1cce30 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/BasicQueryUnitTests.java @@ -24,6 +24,9 @@ import org.bson.Document; import org.junit.jupiter.api.Test; +import 
org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.DisabledOnJre; +import org.junit.jupiter.api.condition.JRE; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; @@ -64,6 +67,7 @@ public void overridesSortCorrectly() { } @Test // DATAMONGO-1093 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "EqualsVerifier uses reflection on Optional") public void equalsContract() { BasicQuery query1 = new BasicQuery("{ \"name\" : \"Thomas\"}", "{\"name\":1, \"age\":1}"); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java index 2d2dedc2ee..ee75da8b19 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java @@ -121,6 +121,8 @@ MongoMappingContext mappingContext() { mappingContext.setAutoIndexCreation(mappingContextConfigurer.autocreateIndex); if(mongoConverterConfigurer.customConversions != null) { mappingContext.setSimpleTypeHolder(mongoConverterConfigurer.customConversions.getSimpleTypeHolder()); + } else { + mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); } mappingContext.afterPropertiesSet(); } From 403f0019d55dd5b70d7132549f82acb8ae16d6bb Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 23 Jun 2021 15:45:14 +0200 Subject: [PATCH 044/983] Fix Optional handling in query creation and result processing. Original pull request: #3687. 
Resolves #3656 --- .../core/mapping/MongoMappingContext.java | 6 ++++ .../mapping/MongoMappingContextUnitTests.java | 31 +++++++++++++++++++ ...tractPersonRepositoryIntegrationTests.java | 13 ++++++++ .../mongodb/repository/PersonRepository.java | 2 ++ .../repository/PersonSummaryWithOptional.java | 24 ++++++++++++++ 5 files changed, 76 insertions(+) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java index c204434809..38381fb994 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java @@ -26,6 +26,7 @@ import org.springframework.data.mapping.model.Property; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.util.NullableWrapperConverters; import org.springframework.data.util.TypeInformation; import org.springframework.lang.Nullable; @@ -69,6 +70,11 @@ public void setFieldNamingStrategy(@Nullable FieldNamingStrategy fieldNamingStra */ @Override protected boolean shouldCreatePersistentEntityFor(TypeInformation type) { + + if(NullableWrapperConverters.supports(type.getType())) { + return false; + } + return !MongoSimpleTypes.HOLDER.isSimpleType(type.getType()) && !AbstractMap.class.isAssignableFrom(type.getType()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java index 
a7e454c52a..f0cc1a2f32 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/MongoMappingContextUnitTests.java @@ -22,6 +22,7 @@ import java.util.Collections; import java.util.Locale; import java.util.Map; +import java.util.Optional; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -35,6 +36,7 @@ import org.springframework.data.mapping.model.FieldNamingStrategy; import com.mongodb.DBRef; +import org.springframework.data.util.TypeInformation; /** * Unit tests for {@link MongoMappingContext}. @@ -173,6 +175,26 @@ void shouldNotCreateEntityForEnum() { assertThat(context.getPersistentEntity(ChronoUnit.class)).isNull(); } + @Test // GH-3656 + void shouldNotCreateEntityForOptionalGetter() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(InterfaceWithMethodReturningOptional.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); + } + + @Test // GH-3656 + void shouldNotCreateEntityForOptionalField() { + + MongoMappingContext context = new MongoMappingContext(); + MongoPersistentEntity entity = context.getRequiredPersistentEntity(ClassWithOptionalField.class); + + assertThat(context.getPersistentEntities()).map(it -> it.getType()).doesNotContain((Class) + Optional.class).contains((Class)Person.class); + } + public class SampleClass { Map children; @@ -244,4 +266,13 @@ class ClassWithChronoUnit { ChronoUnit unit; } + + interface InterfaceWithMethodReturningOptional { + + Optional getPerson(); + } + + class ClassWithOptionalField { + Optional person; + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java index d576913850..e462458ae8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/AbstractPersonRepositoryIntegrationTests.java @@ -1460,4 +1460,17 @@ void executesQueryWithDocumentReferenceCorrectly() { List result = repository.findBySpiritAnimal(dave); assertThat(result).map(Person::getId).containsExactly(josh.getId()); } + + @Test //GH-3656 + void resultProjectionWithOptionalIsExcecutedCorrectly() { + + carter.setAddress(new Address("batman", "robin", "gotham")); + repository.save(carter); + + PersonSummaryWithOptional result = repository.findSummaryWithOptionalByLastname("Beauford"); + + assertThat(result).isNotNull(); + assertThat(result.getAddress()).isPresent(); + assertThat(result.getFirstname()).contains("Carter"); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java index 155cf7a7b9..9ac1282088 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepository.java @@ -307,6 +307,8 @@ Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, Li // DATAMONGO-1030 PersonSummaryDto findSummaryByLastname(String lastname); + PersonSummaryWithOptional findSummaryWithOptionalByLastname(String lastname); + @Query("{ ?0 : ?1 }") List findByKeyValue(String key, String value); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java new file mode 100644 index 0000000000..d6a98752bb --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonSummaryWithOptional.java @@ -0,0 +1,24 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.util.Optional; + +public interface PersonSummaryWithOptional { + + Optional

        getAddress(); + Optional getFirstname(); +} From 81bc3c599b1dc0bcc03bf409c92eb7a6fb814e7d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 25 Jun 2021 13:40:55 +0200 Subject: [PATCH 045/983] Disable tests on Java 16 that require class-based proxies. Original pull request: #3687. Resolves #3656 --- .../core/mapping/MongoMappingContext.java | 4 ++-- .../DbRefMappingMongoConverterUnitTests.java | 6 ++++++ ...RepositoryLazyLoadingIntegrationTests.java | 20 +++++++++---------- .../reference/document-references.adoc | 6 ++++++ 4 files changed, 24 insertions(+), 12 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java index 38381fb994..121658b065 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java @@ -71,7 +71,7 @@ public void setFieldNamingStrategy(@Nullable FieldNamingStrategy fieldNamingStra @Override protected boolean shouldCreatePersistentEntityFor(TypeInformation type) { - if(NullableWrapperConverters.supports(type.getType())) { + if (NullableWrapperConverters.supports(type.getType())) { return false; } @@ -139,7 +139,7 @@ public MongoPersistentEntity getPersistentEntity(MongoPersistentProperty pers MongoPersistentEntity entity = super.getPersistentEntity(persistentProperty); - if(entity == null || !persistentProperty.isUnwrapped()) { + if (entity == null || !persistentProperty.isUnwrapped()) { return entity; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java index b677d90df5..56ab37a0f6 
100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/DbRefMappingMongoConverterUnitTests.java @@ -36,6 +36,8 @@ import org.bson.types.ObjectId; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; import org.mockito.Mockito; @@ -181,6 +183,7 @@ void lazyLoadingProxyForLazyDbRefOnInterface() { } @Test // DATAMONGO-348 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. ArrayList require to open java.util.") void lazyLoadingProxyForLazyDbRefOnConcreteCollection() { String id = "42"; @@ -508,6 +511,7 @@ void shouldNotEagerlyResolveIdPropertyWithPropertyAccess() { } @Test // DATAMONGO-1076 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. ArrayList require to open java.util.") void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() throws Exception { MongoPersistentEntity entity = mappingContext @@ -526,6 +530,7 @@ void shouldNotTriggerResolvingOfLazyLoadedProxyWhenFinalizeMethodIsInvoked() thr } @Test // DATAMONGO-1194 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. ArrayList require to open java.util.") void shouldBulkFetchListOfReferences() { String id1 = "1"; @@ -576,6 +581,7 @@ void shouldBulkFetchSetOfReferencesForConstructorCreation() { } @Test // DATAMONGO-1194 + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. 
ArrayList require to open java.util.") void shouldFallbackToOneByOneFetchingWhenElementsInListOfReferencesPointToDifferentCollections() { String id1 = "1"; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java index 5cc8e82599..4ca82abf57 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/PersonRepositoryLazyLoadingIntegrationTests.java @@ -22,14 +22,15 @@ import java.util.Arrays; import java.util.List; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; - +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledForJreRange; +import org.junit.jupiter.api.condition.JRE; +import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; /** * Integration test for {@link PersonRepository} for lazy loading support. 
@@ -38,13 +39,13 @@ * @author Oliver Gierke */ @ContextConfiguration(locations = "PersonRepositoryIntegrationTests-context.xml") -@RunWith(SpringRunner.class) +@ExtendWith(SpringExtension.class) public class PersonRepositoryLazyLoadingIntegrationTests { @Autowired PersonRepository repository; @Autowired MongoOperations operations; - @Before + @BeforeEach public void setUp() throws InterruptedException { repository.deleteAll(); @@ -61,7 +62,6 @@ public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() thr Person person = new Person(); person.setFirstname("Oliver"); person.setFans(Arrays.asList(thomas)); - person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); Person oliver = repository.findById(person.id).get(); @@ -75,7 +75,8 @@ public void shouldLoadAssociationWithDbRefOnInterfaceAndLazyLoadingEnabled() thr } @Test // DATAMONGO-348 - public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() throws Exception { + @DisabledForJreRange(min = JRE.JAVA_16, disabledReason = "Class Proxies for eg. ArrayList require to open java.util.") + public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnabled() { User thomas = new User(); thomas.username = "Thomas"; @@ -83,7 +84,6 @@ public void shouldLoadAssociationWithDbRefOnConcreteCollectionAndLazyLoadingEnab Person person = new Person(); person.setFirstname("Oliver"); - person.setFans(Arrays.asList(thomas)); person.setRealFans(new ArrayList(Arrays.asList(thomas))); repository.save(person); diff --git a/src/main/asciidoc/reference/document-references.adoc b/src/main/asciidoc/reference/document-references.adoc index 92badd2fa1..885d2d6ade 100644 --- a/src/main/asciidoc/reference/document-references.adoc +++ b/src/main/asciidoc/reference/document-references.adoc @@ -49,6 +49,9 @@ TIP: Lazily loaded ``DBRef``s can be hard to debug. Make sure tooling does not accidentally trigger proxy resolution by e.g. 
calling `toString()` or some inline debug rendering invoking property getters. Please consider to enable _trace_ logging for `org.springframework.data.mongodb.core.convert.DefaultDbRefResolver` to gain insight on `DBRef` resolution. +CAUTION: Lazy loading may require class proxies, that in turn, might need access to jdk internals, that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default]. +For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument. + [[mapping-usage.document-references]] === Using Document References @@ -136,6 +139,9 @@ Result order of `Collection` like properties is restored based on the used looku | Resolves properties eagerly by default. |=== +CAUTION: Lazy loading may require class proxies, that in turn, might need access to jdk internals, that are not open, starting with Java 16+, due to https://openjdk.java.net/jeps/396[JEP 396: Strongly Encapsulate JDK Internals by Default]. +For those cases please consider falling back to an interface type (eg. switch from `ArrayList` to `List`) or provide the required `--add-opens` argument. + `@DocumentReference(lookup)` allows defining filter queries that can be different from the `_id` field and therefore offer a flexible way of defining references between entities as demonstrated in the sample below, where the `Publisher` of a book is referenced by its acronym instead of the internal `id`. ==== From 7d0b070d1f8110b9ff5d976e55e49c30794e95f9 Mon Sep 17 00:00:00 2001 From: Oliver Drotbohm Date: Fri, 9 Jul 2021 15:26:44 +0200 Subject: [PATCH 046/983] Adapt to API consolidation in Spring Data Commons' PersistentProperty. 
Closes: #3700 Original Pull Request: #3701 Related to: spring-projects/spring-data-commons#2408 --- .../core/mapping/UnwrappedMongoPersistentProperty.java | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index 8f24bab61b..a2194c173f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -147,6 +147,11 @@ public Iterable> getPersistentEntityTypes() { return delegate.getPersistentEntityTypes(); } + @Override + public Iterable> getPersistentEntityTypeInformation() { + return delegate.getPersistentEntityTypeInformation(); + } + @Override @Nullable public Method getGetter() { @@ -318,6 +323,11 @@ public Class getAssociationTargetType() { return delegate.getAssociationTargetType(); } + @Override + public TypeInformation getAssociationTargetTypeInformation() { + return delegate.getAssociationTargetTypeInformation(); + } + @Override public PersistentPropertyAccessor getAccessorForOwner(T owner) { return delegate.getAccessorForOwner(owner); From a6a2f0bde902e31ff41070e24a20c1af03b60edb Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 12 Jul 2021 14:56:44 +0200 Subject: [PATCH 047/983] Upgrade to MongoDB 4.3.0-beta4 Drivers. 
Closes: #3693 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index a6d5da9170..98de438544 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.2.3 + 4.3.0-beta4 ${mongo} 1.19 From 42ab7d2f6378488d8f23be6f056748c9eb406f19 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 13 Jul 2021 08:27:51 +0200 Subject: [PATCH 048/983] Adapt to changes in AssertJ 3.20. Closes #3705 --- .../mongodb/test/util/DocumentAssert.java | 49 +++++-------------- 1 file changed, 11 insertions(+), 38 deletions(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java index db33b29b82..6f519c71e2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/DocumentAssert.java @@ -134,12 +134,12 @@ public DocumentAssert containsKey(String key) { return containsKeys(key); } - /* + /* * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#containsKeys(java.lang.Object[]) + * @see org.assertj.core.api.AbstractMapAssert#containsKeysForProxy(java.lang.Object[]) */ @Override - public final DocumentAssert containsKeys(String... keys) { + protected DocumentAssert containsKeysForProxy(String[] keys) { Set notFound = new LinkedHashSet<>(); @@ -166,12 +166,12 @@ public DocumentAssert doesNotContainKey(String key) { return doesNotContainKeys(key); } - /* + /* * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContainKeys(java.lang.Object[]) + * @see org.assertj.core.api.AbstractMapAssert#doesNotContainKeysForProxy(java.lang.Object[]) */ @Override - public final DocumentAssert doesNotContainKeys(String... 
keys) { + protected DocumentAssert doesNotContainKeysForProxy(String[] keys) { Set found = new LinkedHashSet<>(); for (String key : keys) { @@ -191,13 +191,8 @@ public final DocumentAssert doesNotContainKeys(String... keys) { // used in soft assertions which need to be able to proxy method - @SafeVarargs requiring method to be final prevents // using proxies. - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#contains(java.util.Map.Entry[]) - */ - @SafeVarargs @Override - public final DocumentAssert contains(Map.Entry... entries) { + protected DocumentAssert containsForProxy(Entry[] entries) { // if both actual and values are empty, then assertion passes. if (actual.isEmpty() && entries.length == 0) { @@ -216,14 +211,8 @@ public final DocumentAssert contains(Map.Entry... entries) { - + protected DocumentAssert containsAnyOfForProxy(Entry[] entries) { for (Map.Entry entry : entries) { if (containsEntry(entry)) { return myself; @@ -233,24 +222,13 @@ public final DocumentAssert containsAnyOf(Map.Entry... entries) { + protected DocumentAssert containsOnlyForProxy(Entry[] entries) { throw new UnsupportedOperationException(); } - /* - * (non-Javadoc) - * @see org.assertj.core.api.AbstractMapAssert#doesNotContain(java.util.Map.Entry[]) - */ - @SafeVarargs @Override - public final DocumentAssert doesNotContain(Map.Entry... entries) { - + protected DocumentAssert doesNotContainForProxy(Entry[] entries) { Set> found = new LinkedHashSet<>(); for (Map.Entry entry : entries) { @@ -265,13 +243,8 @@ public final DocumentAssert doesNotContain(Map.Entry... entries) { + protected DocumentAssert containsExactlyForProxy(Entry[] entries) { throw new UnsupportedOperationException(); } From 93b9f23b077cd60c43b22aa66b7431c2500ccf6a Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 13 Jul 2021 08:42:24 +0200 Subject: [PATCH 049/983] Polishing. Fix proxy comparison. 
See #3705 --- .../data/mongodb/core/convert/LazyLoadingProxyFactory.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java index f77b96c71f..887ddfe78d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/LazyLoadingProxyFactory.java @@ -167,7 +167,7 @@ public Object intercept(Object o, Method method, Object[] args, MethodProxy prox } if (ReflectionUtils.isEqualsMethod(method)) { - return proxyEquals(proxy, args[0]); + return proxyEquals(o, args[0]); } if (ReflectionUtils.isHashCodeMethod(method)) { From 5bd7ff141355e98843a0eca990a6018458dc6557 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 14 Jul 2021 07:44:00 +0200 Subject: [PATCH 050/983] Upgrade to MongoDB 4.3.0 Drivers. Closes: #3706 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 98de438544..b688f3ee50 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.3.0-beta4 + 4.3.0 ${mongo} 1.19 From 986ea39f902f65653c77ab32153c97ff8f7efb7e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 17 Jun 2021 10:50:13 +0200 Subject: [PATCH 051/983] Upgrade to Querydsl 5.0. Move off our own Querydsl copies, as Querydsl 5.0 ships MongoDB Document API support. Remove package-private duplicates of Querydsl code. Introduce SpringDataMongodbQuerySupport to provide a well-formatted toString representation of the actual query. 
Original Pull Request: #3674 --- .../support/MongodbDocumentSerializer.java | 459 ------------------ .../support/QuerydslAbstractMongodbQuery.java | 32 +- .../support/QuerydslAnyEmbeddedBuilder.java | 3 + .../QuerydslFetchableMongodbQuery.java | 272 ----------- .../support/QuerydslJoinBuilder.java | 67 --- .../repository/support/QuerydslMongoOps.java | 43 -- .../ReactiveSpringDataMongodbQuery.java | 79 ++- .../support/SpringDataMongodbQuery.java | 199 +++++++- .../SpringDataMongodbQuerySupport.java | 147 ++++++ .../support/SpringDataMongodbSerializer.java | 2 + .../SimpleReactiveMongoRepositoryTests.java | 3 +- 11 files changed, 383 insertions(+), 923 deletions(-) delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java delete mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java deleted file mode 100644 index 3f0d281cc4..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongodbDocumentSerializer.java +++ /dev/null @@ -1,459 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.repository.support; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Queue; -import java.util.Set; -import java.util.regex.Pattern; - -import org.bson.BsonJavaScript; -import org.bson.BsonRegularExpression; -import org.bson.Document; -import org.bson.types.ObjectId; -import org.springframework.lang.Nullable; -import org.springframework.util.Assert; - -import com.mongodb.DBRef; -import com.querydsl.core.types.*; -import com.querydsl.mongodb.MongodbOps; - -/** - *

        - * Serializes the given Querydsl query to a Document query for MongoDB. - *

        - *

        - * Original implementation source {@link com.querydsl.mongodb.MongodbSerializer} by {@literal The Querydsl Team} - * (http://www.querydsl.com/team) licensed under the Apache License, Version - * 2.0. - *

        - * Modified to use {@link Document} instead of {@link com.mongodb.DBObject}, updated nullable types and code format. Use - * Bson specific types and add {@link QuerydslMongoOps#NO_MATCH}. - * - * @author laimw - * @author Mark Paluch - * @author Christoph Strobl - * @author Mikhail Kaduchka - * @author Enrique Leon Molina - * @since 2.1 - */ -abstract class MongodbDocumentSerializer implements Visitor { - - @Nullable - Object handle(Expression expression) { - return expression.accept(this, null); - } - - /** - * Create the MongoDB specific query document. - * - * @param predicate must not be {@literal null}. - * @return empty {@link Document} by default. - */ - Document toQuery(Predicate predicate) { - - Object value = handle(predicate); - - if (value == null) { - return new Document(); - } - - Assert.isInstanceOf(Document.class, value, - () -> String.format("Invalid type. Expected Document but found %s", value.getClass())); - - return (Document) value; - } - - /** - * Create the MongoDB specific sort document. - * - * @param orderBys must not be {@literal null}. - * @return empty {@link Document} by default. - */ - Document toSort(List> orderBys) { - - Document sort = new Document(); - - orderBys.forEach(orderSpecifier -> { - - Object key = orderSpecifier.getTarget().accept(this, null); - - Assert.notNull(key, () -> String.format("Mapped sort key for %s must not be null!", orderSpecifier)); - sort.append(key.toString(), orderSpecifier.getOrder() == Order.ASC ? 
1 : -1); - }); - - return sort; - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.Constant, java.lang.Void) - */ - @Override - public Object visit(Constant expr, Void context) { - - if (!Enum.class.isAssignableFrom(expr.getType())) { - return expr.getConstant(); - } - - @SuppressWarnings("unchecked") // Guarded by previous check - Constant> expectedExpr = (Constant>) expr; - return expectedExpr.getConstant().name(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.TemplateExpression, java.lang.Void) - */ - @Override - public Object visit(TemplateExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.types.Visitor#visit(com.querydsl.core.types.FactoryExpression, java.lang.Void) - */ - @Override - public Object visit(FactoryExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - protected String asDBKey(Operation expr, int index) { - - String key = (String) asDBValue(expr, index); - - Assert.hasText(key, () -> String.format("Mapped key must not be null nor empty for expression %s.", expr)); - return key; - } - - @Nullable - protected Object asDBValue(Operation expr, int index) { - return expr.getArg(index).accept(this, null); - } - - private String regexValue(Operation expr, int index) { - - Object value = expr.getArg(index).accept(this, null); - - Assert.notNull(value, () -> String.format("Regex for %s must not be null.", expr)); - return Pattern.quote(value.toString()); - } - - protected Document asDocument(String key, @Nullable Object value) { - return new Document(key, value); - } - - @SuppressWarnings("unchecked") - @Override - public Object visit(Operation expr, Void context) { - - Operator op = expr.getOperator(); - if (op == Ops.EQ) { - - if (expr.getArg(0) instanceof Operation) { - Operation lhs = (Operation) expr.getArg(0); - if (lhs.getOperator() == 
Ops.COL_SIZE || lhs.getOperator() == Ops.ARRAY_SIZE) { - return asDocument(asDBKey(lhs, 0), asDocument("$size", asDBValue(expr, 1))); - } else { - throw new UnsupportedOperationException("Illegal operation " + expr); - } - } else if (expr.getArg(0) instanceof Path) { - Path path = (Path) expr.getArg(0); - Constant constant = (Constant) expr.getArg(1); - return asDocument(asDBKey(expr, 0), convert(path, constant)); - } - } else if (op == Ops.STRING_IS_EMPTY) { - return asDocument(asDBKey(expr, 0), ""); - } else if (op == Ops.AND) { - - Queue> pendingDocuments = collectConnectorArgs("$and", expr); - List> unmergeableDocuments = new ArrayList<>(); - List> generatedDocuments = new ArrayList<>(); - - while (!pendingDocuments.isEmpty()) { - - Map lhs = pendingDocuments.poll(); - - for (Map rhs : pendingDocuments) { - Set lhs2 = new LinkedHashSet<>(lhs.keySet()); - lhs2.retainAll(rhs.keySet()); - if (lhs2.isEmpty()) { - lhs.putAll(rhs); - } else { - unmergeableDocuments.add(rhs); - } - } - - generatedDocuments.add(lhs); - pendingDocuments = new LinkedList<>(unmergeableDocuments); - unmergeableDocuments = new LinkedList<>(); - } - - return generatedDocuments.size() == 1 ? 
generatedDocuments.get(0) : asDocument("$and", generatedDocuments); - } else if (op == Ops.NOT) { - // Handle the not's child - Operation subOperation = (Operation) expr.getArg(0); - Operator subOp = subOperation.getOperator(); - if (subOp == Ops.IN) { - return visit( - ExpressionUtils.operation(Boolean.class, Ops.NOT_IN, subOperation.getArg(0), subOperation.getArg(1)), - context); - } else { - Document arg = (Document) handle(expr.getArg(0)); - return negate(arg); - } - - } else if (op == Ops.OR) { - return asDocument("$or", collectConnectorArgs("$or", expr)); - } else if (op == Ops.NE) { - - Path path = (Path) expr.getArg(0); - Constant constant = (Constant) expr.getArg(1); - return asDocument(asDBKey(expr, 0), asDocument("$ne", convert(path, constant))); - - } else if (op == Ops.STARTS_WITH) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1))); - } else if (op == Ops.STARTS_WITH_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1), "i")); - } else if (op == Ops.ENDS_WITH) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regexValue(expr, 1) + "$")); - } else if (op == Ops.ENDS_WITH_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regexValue(expr, 1) + "$", "i")); - } else if (op == Ops.EQ_IGNORE_CASE) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression("^" + regexValue(expr, 1) + "$", "i")); - } else if (op == Ops.STRING_CONTAINS) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(".*" + regexValue(expr, 1) + ".*")); - } else if (op == Ops.STRING_CONTAINS_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(".*" + regexValue(expr, 1) + ".*", "i")); - } else if (op == Ops.MATCHES) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(asDBValue(expr, 1).toString())); - } else if (op == Ops.MATCHES_IC) { - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(asDBValue(expr, 
1).toString(), "i")); - } else if (op == Ops.LIKE) { - - String regex = ExpressionUtils.likeToRegex((Expression) expr.getArg(1)).toString(); - return asDocument(asDBKey(expr, 0), new BsonRegularExpression(regex)); - } else if (op == Ops.BETWEEN) { - - Document value = new Document("$gte", asDBValue(expr, 1)); - value.append("$lte", asDBValue(expr, 2)); - return asDocument(asDBKey(expr, 0), value); - } else if (op == Ops.IN) { - - int constIndex = 0; - int exprIndex = 1; - if (expr.getArg(1) instanceof Constant) { - constIndex = 1; - exprIndex = 0; - } - if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) { - @SuppressWarnings("unchecked") // guarded by previous check - Collection values = ((Constant>) expr.getArg(constIndex)).getConstant(); - return asDocument(asDBKey(expr, exprIndex), asDocument("$in", values)); - } else { - Path path = (Path) expr.getArg(exprIndex); - Constant constant = (Constant) expr.getArg(constIndex); - return asDocument(asDBKey(expr, exprIndex), convert(path, constant)); - } - } else if (op == Ops.NOT_IN) { - - int constIndex = 0; - int exprIndex = 1; - if (expr.getArg(1) instanceof Constant) { - - constIndex = 1; - exprIndex = 0; - } - if (Collection.class.isAssignableFrom(expr.getArg(constIndex).getType())) { - - @SuppressWarnings("unchecked") // guarded by previous check - Collection values = ((Constant>) expr.getArg(constIndex)).getConstant(); - return asDocument(asDBKey(expr, exprIndex), asDocument("$nin", values)); - } else { - - Path path = (Path) expr.getArg(exprIndex); - Constant constant = (Constant) expr.getArg(constIndex); - return asDocument(asDBKey(expr, exprIndex), asDocument("$ne", convert(path, constant))); - } - } else if (op == Ops.COL_IS_EMPTY) { - - List list = new ArrayList<>(2); - list.add(asDocument(asDBKey(expr, 0), new ArrayList())); - list.add(asDocument(asDBKey(expr, 0), asDocument("$exists", false))); - return asDocument("$or", list); - } else if (op == Ops.LT) { - return 
asDocument(asDBKey(expr, 0), asDocument("$lt", asDBValue(expr, 1))); - } else if (op == Ops.GT) { - return asDocument(asDBKey(expr, 0), asDocument("$gt", asDBValue(expr, 1))); - } else if (op == Ops.LOE) { - return asDocument(asDBKey(expr, 0), asDocument("$lte", asDBValue(expr, 1))); - } else if (op == Ops.GOE) { - return asDocument(asDBKey(expr, 0), asDocument("$gte", asDBValue(expr, 1))); - } else if (op == Ops.IS_NULL) { - return asDocument(asDBKey(expr, 0), asDocument("$exists", false)); - } else if (op == Ops.IS_NOT_NULL) { - return asDocument(asDBKey(expr, 0), asDocument("$exists", true)); - } else if (op == Ops.CONTAINS_KEY) { - - Path path = (Path) expr.getArg(0); - Expression key = expr.getArg(1); - return asDocument(visit(path, context) + "." + key.toString(), asDocument("$exists", true)); - } else if (op == MongodbOps.NEAR) { - return asDocument(asDBKey(expr, 0), asDocument("$near", asDBValue(expr, 1))); - } else if (op == MongodbOps.NEAR_SPHERE) { - return asDocument(asDBKey(expr, 0), asDocument("$nearSphere", asDBValue(expr, 1))); - } else if (op == MongodbOps.ELEM_MATCH) { - return asDocument(asDBKey(expr, 0), asDocument("$elemMatch", asDBValue(expr, 1))); - } else if (op == QuerydslMongoOps.NO_MATCH) { - return new Document("$where", new BsonJavaScript("function() { return false }")); - } - - throw new UnsupportedOperationException("Illegal operation " + expr); - } - - private Object negate(Document arg) { - - List list = new ArrayList<>(); - for (Map.Entry entry : arg.entrySet()) { - - if (entry.getKey().equals("$or")) { - list.add(asDocument("$nor", entry.getValue())); - } else if (entry.getKey().equals("$and")) { - - List list2 = new ArrayList<>(); - for (Object o : ((Collection) entry.getValue())) { - list2.add(negate((Document) o)); - } - list.add(asDocument("$or", list2)); - } else if (entry.getValue() instanceof Pattern || entry.getValue() instanceof BsonRegularExpression) { - list.add(asDocument(entry.getKey(), asDocument("$not", 
entry.getValue()))); - } else if (entry.getValue() instanceof Document) { - list.add(negate(entry.getKey(), (Document) entry.getValue())); - } else { - list.add(asDocument(entry.getKey(), asDocument("$ne", entry.getValue()))); - } - } - return list.size() == 1 ? list.get(0) : asDocument("$or", list); - } - - private Object negate(String key, Document value) { - - if (value.size() == 1) { - return asDocument(key, asDocument("$not", value)); - } else { - - List list2 = new ArrayList<>(); - for (Map.Entry entry2 : value.entrySet()) { - list2.add(asDocument(key, asDocument("$not", asDocument(entry2.getKey(), entry2.getValue())))); - } - - return asDocument("$or", list2); - } - } - - protected Object convert(Path property, Constant constant) { - - if (isReference(property)) { - return asReference(constant.getConstant()); - } else if (isId(property)) { - - if (isReference(property.getMetadata().getParent())) { - return asReferenceKey(property.getMetadata().getParent().getType(), constant.getConstant()); - } else if (constant.getType().equals(String.class) && isImplicitObjectIdConversion()) { - - String id = (String) constant.getConstant(); - return ObjectId.isValid(id) ? 
new ObjectId(id) : id; - } - } - return visit(constant, null); - } - - protected boolean isImplicitObjectIdConversion() { - return true; - } - - protected DBRef asReferenceKey(Class entity, Object id) { - // TODO override in subclass - throw new UnsupportedOperationException(); - } - - protected abstract DBRef asReference(Object constant); - - protected abstract boolean isReference(@Nullable Path arg); - - protected boolean isId(Path arg) { - // TODO override in subclass - return false; - } - - @Override - public String visit(Path expr, Void context) { - - PathMetadata metadata = expr.getMetadata(); - - if (metadata.getParent() != null) { - - Path parent = metadata.getParent(); - if (parent.getMetadata().getPathType() == PathType.DELEGATE) { - parent = parent.getMetadata().getParent(); - } - if (metadata.getPathType() == PathType.COLLECTION_ANY) { - return visit(parent, context); - } else if (parent.getMetadata().getPathType() != PathType.VARIABLE) { - - String rv = getKeyForPath(expr, metadata); - String parentStr = visit(parent, context); - return rv != null ? parentStr + "." 
+ rv : parentStr; - } - } - return getKeyForPath(expr, metadata); - } - - protected String getKeyForPath(Path expr, PathMetadata metadata) { - return metadata.getElement().toString(); - } - - @Override - public Object visit(SubQueryExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - @Override - public Object visit(ParamExpression expr, Void context) { - throw new UnsupportedOperationException(); - } - - private LinkedList> collectConnectorArgs(String operator, Operation operation) { - - LinkedList> pendingDocuments = new LinkedList<>(); - for (Expression exp : operation.getArgs()) { - Map document = (Map) handle(exp); - if (document.keySet().size() == 1 && document.containsKey(operator)) { - pendingDocuments.addAll((Collection>) document.get(operator)); - } else { - pendingDocuments.add(document); - } - } - return pendingDocuments; - - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java index 422eea5778..b255d20273 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java @@ -34,6 +34,8 @@ import com.querydsl.core.types.OrderSpecifier; import com.querydsl.core.types.ParamExpression; import com.querydsl.core.types.Predicate; +import com.querydsl.mongodb.document.AbstractMongodbQuery; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * {@code QuerydslAbstractMongodbQuery} provides a base class for general Querydsl query implementation. @@ -49,8 +51,12 @@ * @author Mark Paluch * @author Christoph Strobl * @since 2.1 + * @deprecated since 3.3, use Querydsl's {@link AbstractMongodbQuery} directly. 
This class is deprecated for removal + * with the next major release. */ +@Deprecated public abstract class QuerydslAbstractMongodbQuery> + extends AbstractMongodbQuery implements SimpleQuery { private static final JsonWriterSettings JSON_WRITER_SETTINGS = JsonWriterSettings.builder().outputMode(JsonMode.SHELL) @@ -67,6 +73,8 @@ public abstract class QuerydslAbstractMongodbQuery((Q) this, new DefaultQueryMetadata(), false); this.serializer = serializer; } @@ -158,22 +166,6 @@ protected Document createProjection(@Nullable Expression projectionExpression return projection; } - /** - * Compute the filer {@link Document} from the given {@link Predicate}. - * - * @param predicate can be {@literal null}. - * @return an empty {@link Document} if predicate is {@literal null}. - * @see MongodbDocumentSerializer#toQuery(Predicate) - */ - protected Document createQuery(@Nullable Predicate predicate) { - - if (predicate == null) { - return new Document(); - } - - return serializer.toQuery(predicate); - } - /** * Compute the sort {@link Document} from the given list of {@link OrderSpecifier order specifiers}. * @@ -194,14 +186,6 @@ QueryMixin getQueryMixin() { return queryMixin; } - /** - * Get the where definition as a Document instance - * - * @return - */ - Document asDocument() { - return createQuery(queryMixin.getMetadata().getWhere()); - } /** * Returns the {@literal Mongo Shell} representation of the query.
        diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java index 3ba84a2dd3..b6935a5e8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAnyEmbeddedBuilder.java @@ -37,7 +37,10 @@ * @author Mark Paluch * @author Christoph Strobl * @since 2.1 + * @deprecated since 3.3, use Querydsl's {@link com.querydsl.mongodb.document.AnyEmbeddedBuilder} directly. This class + * is deprecated for removal with the next major release. */ +@Deprecated public class QuerydslAnyEmbeddedBuilder, K> { private final QueryMixin queryMixin; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java deleted file mode 100644 index 36057d58c9..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslFetchableMongodbQuery.java +++ /dev/null @@ -1,272 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.repository.support; - -import java.util.Collection; -import java.util.Collections; -import java.util.List; - -import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithProjection; -import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.query.BasicQuery; -import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; -import org.springframework.util.LinkedMultiValueMap; - -import com.mysema.commons.lang.CloseableIterator; -import com.querydsl.core.Fetchable; -import com.querydsl.core.JoinExpression; -import com.querydsl.core.QueryMetadata; -import com.querydsl.core.QueryModifiers; -import com.querydsl.core.QueryResults; -import com.querydsl.core.types.Expression; -import com.querydsl.core.types.ExpressionUtils; -import com.querydsl.core.types.Operation; -import com.querydsl.core.types.OrderSpecifier; -import com.querydsl.core.types.Path; -import com.querydsl.core.types.Predicate; -import com.querydsl.core.types.dsl.CollectionPathBase; - -/** - * {@link Fetchable} MongoDB query with utilizing {@link MongoOperations} for command execution. 
- * - * @param result type - * @param concrete subtype - * @author Mark Paluch - * @author Christoph Strobl - * @since 2.1 - */ -abstract class QuerydslFetchableMongodbQuery> - extends QuerydslAbstractMongodbQuery implements Fetchable { - - private final Class entityClass; - private final String collection; - private final MongoOperations mongoOperations; - private final FindWithProjection find; - - QuerydslFetchableMongodbQuery(MongodbDocumentSerializer serializer, Class entityClass, String collection, - MongoOperations mongoOperations) { - - super(serializer); - - this.entityClass = (Class) entityClass; - this.collection = collection; - this.mongoOperations = mongoOperations; - find = mongoOperations.query(this.entityClass).inCollection(collection); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#iterable() - */ - @Override - public CloseableIterator iterate() { - - org.springframework.data.util.CloseableIterator stream = mongoOperations.stream(createQuery(), - entityClass, collection); - - return new CloseableIterator() { - - @Override - public boolean hasNext() { - return stream.hasNext(); - } - - @Override - public K next() { - return stream.next(); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("Cannot remove from iterator while streaming data."); - } - - @Override - public void close() { - stream.close(); - } - }; - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetch() - */ - @Override - public List fetch() { - return find.matching(createQuery()).all(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchFirst() - */ - @Override - public K fetchFirst() { - return find.matching(createQuery()).firstValue(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchOne() - */ - @Override - public K fetchOne() { - return find.matching(createQuery()).oneValue(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchResults() - */ - @Override 
- public QueryResults fetchResults() { - - long total = fetchCount(); - return total > 0L ? new QueryResults<>(fetch(), getQueryMixin().getMetadata().getModifiers(), total) - : QueryResults.emptyResults(); - } - - /* - * (non-Javadoc) - * @see com.querydsl.core.Fetchable#fetchCount() - */ - @Override - public long fetchCount() { - return find.matching(Query.of(createQuery()).skip(-1).limit(-1)).count(); - } - - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - public QuerydslJoinBuilder join(Path ref, Path target) { - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - public QuerydslJoinBuilder join(CollectionPathBase ref, Path target) { - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a constraint for an embedded object. - * - * @param collection collection must not be {@literal null}. - * @param target target must not be {@literal null}. - * @return new instance of {@link QuerydslAnyEmbeddedBuilder}. 
- */ - public QuerydslAnyEmbeddedBuilder anyEmbedded(Path> collection, Path target) { - return new QuerydslAnyEmbeddedBuilder<>(getQueryMixin(), collection); - } - - protected org.springframework.data.mongodb.core.query.Query createQuery() { - - QueryMetadata metadata = getQueryMixin().getMetadata(); - - return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), - metadata.getOrderBy()); - } - - protected org.springframework.data.mongodb.core.query.Query createQuery(@Nullable Predicate filter, - @Nullable Expression projection, QueryModifiers modifiers, List> orderBy) { - - BasicQuery basicQuery = new BasicQuery(createQuery(filter), createProjection(projection)); - - Integer limit = modifiers.getLimitAsInteger(); - Integer offset = modifiers.getOffsetAsInteger(); - - if (limit != null) { - basicQuery.limit(limit); - } - if (offset != null) { - basicQuery.skip(offset); - } - if (orderBy.size() > 0) { - basicQuery.setSortObject(createSort(orderBy)); - } - - return basicQuery; - } - - @Nullable - protected Predicate createFilter(QueryMetadata metadata) { - - Predicate filter; - if (!metadata.getJoins().isEmpty()) { - filter = ExpressionUtils.allOf(metadata.getWhere(), createJoinFilter(metadata)); - } else { - filter = metadata.getWhere(); - } - return filter; - } - - @SuppressWarnings("unchecked") - @Nullable - protected Predicate createJoinFilter(QueryMetadata metadata) { - - LinkedMultiValueMap, Predicate> predicates = new LinkedMultiValueMap<>(); - List joins = metadata.getJoins(); - - for (int i = joins.size() - 1; i >= 0; i--) { - - JoinExpression join = joins.get(i); - Path source = (Path) ((Operation) join.getTarget()).getArg(0); - Path target = (Path) ((Operation) join.getTarget()).getArg(1); - Collection extraFilters = predicates.get(target.getRoot()); - Predicate filter = ExpressionUtils.allOf(join.getCondition(), allOf(extraFilters)); - - List ids = getIds(target.getType(), filter); - - if (ids.isEmpty()) { - return 
ExpressionUtils.predicate(QuerydslMongoOps.NO_MATCH, source); - } - - Path path = ExpressionUtils.path(String.class, source, "$id"); - predicates.add(source.getRoot(), ExpressionUtils.in((Path) path, ids)); - } - - Path source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0); - return allOf(predicates.get(source.getRoot())); - } - - private Predicate allOf(Collection predicates) { - return predicates != null ? ExpressionUtils.allOf(predicates) : null; - } - - /** - * Fetch the list of ids matching a given condition. - * - * @param targetType must not be {@literal null}. - * @param condition must not be {@literal null}. - * @return empty {@link List} if none found. - */ - protected List getIds(Class targetType, Predicate condition) { - - Query query = createQuery(condition, null, QueryModifiers.EMPTY, Collections.emptyList()); - return mongoOperations.findDistinct(query, "_id", targetType, Object.class); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java deleted file mode 100644 index 344ad08826..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslJoinBuilder.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.repository.support; - -import com.querydsl.core.JoinType; -import com.querydsl.core.support.QueryMixin; -import com.querydsl.core.types.ExpressionUtils; -import com.querydsl.core.types.Path; -import com.querydsl.core.types.Predicate; - -/** - * {@code QuerydslJoinBuilder} is a builder for join constraints. - *

        - * Original implementation source {@link com.querydsl.mongodb.JoinBuilder} by {@literal The Querydsl Team} - * (http://www.querydsl.com/team) licensed under the Apache License, Version - * 2.0. - *

        - * Modified for usage with {@link QuerydslAbstractMongodbQuery}. - * - * @param - * @param - * @author tiwe - * @author Mark Paluch - * @author Christoph Strobl - * @since 2.1 - */ -public class QuerydslJoinBuilder, K, T> { - - private final QueryMixin queryMixin; - private final Path ref; - private final Path target; - - QuerydslJoinBuilder(QueryMixin queryMixin, Path ref, Path target) { - - this.queryMixin = queryMixin; - this.ref = ref; - this.target = target; - } - - /** - * Add the given join conditions. - * - * @param conditions must not be {@literal null}. - * @return the target {@link QueryMixin}. - * @see QueryMixin#on(Predicate) - */ - @SuppressWarnings("unchecked") - public Q on(Predicate... conditions) { - - queryMixin.addJoin(JoinType.JOIN, ExpressionUtils.as((Path) ref, target)); - queryMixin.on(conditions); - return queryMixin.getSelf(); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java deleted file mode 100644 index 0c695afd0c..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoOps.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright 2018-2021 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.repository.support; - -import com.querydsl.core.types.Operator; - -/** - * Spring Data specific {@link Operator operators} for usage with Querydsl and MongoDB. - * - * @author Christoph Strobl - * @since 2.1 - */ -enum QuerydslMongoOps implements Operator { - - /** - * {@link Operator} always evaluating to {@literal false}. - */ - NO_MATCH(Boolean.class); - - private final Class type; - - QuerydslMongoOps(Class type) { - this.type = type; - } - - @Override - public Class getType() { - return type; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java index 4162a79482..8b30e585e6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveSpringDataMongodbQuery.java @@ -22,6 +22,9 @@ import java.util.Collections; import java.util.List; +import org.bson.Document; + +import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.query.BasicQuery; @@ -40,19 +43,21 @@ import com.querydsl.core.types.OrderSpecifier; import com.querydsl.core.types.Path; import com.querydsl.core.types.Predicate; -import com.querydsl.core.types.dsl.CollectionPathBase; +import com.querydsl.mongodb.MongodbOps; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * MongoDB query with utilizing {@link ReactiveMongoOperations} for command execution. 
* + * @implNote This class uses {@link MongoOperations} to directly convert documents into the target entity type. Also, we + * want entites to participate in lifecycle events and entity callbacks. * @param result type * @author Mark Paluch * @author Christoph Strobl * @since 2.2 */ -class ReactiveSpringDataMongodbQuery extends QuerydslAbstractMongodbQuery> { +class ReactiveSpringDataMongodbQuery extends SpringDataMongodbQuerySupport> { - private final Class entityClass; private final ReactiveMongoOperations mongoOperations; private final FindWithProjection find; @@ -60,15 +65,15 @@ class ReactiveSpringDataMongodbQuery extends QuerydslAbstractMongodbQuery entityClass, @Nullable String collection) { super(serializer); - this.entityClass = (Class) entityClass; this.mongoOperations = mongoOperations; - this.find = StringUtils.hasText(collection) ? mongoOperations.query(this.entityClass).inCollection(collection) - : mongoOperations.query(this.entityClass); + this.find = StringUtils.hasText(collection) ? mongoOperations.query((Class) entityClass).inCollection(collection) + : mongoOperations.query((Class) entityClass); } /** @@ -99,48 +104,11 @@ Mono fetchCount() { return createQuery().flatMap(it -> find.matching(it).count()); } - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - QuerydslJoinBuilder, K, T> join(Path ref, Path target) { - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a join. - * - * @param ref reference - * @param target join target - * @return new instance of {@link QuerydslJoinBuilder}. - */ - QuerydslJoinBuilder, K, T> join(CollectionPathBase ref, - Path target) { - - return new QuerydslJoinBuilder<>(getQueryMixin(), ref, target); - } - - /** - * Define a constraint for an embedded object. - * - * @param collection collection must not be {@literal null}. - * @param target target must not be {@literal null}. 
- * @return new instance of {@link QuerydslAnyEmbeddedBuilder}. - */ - QuerydslAnyEmbeddedBuilder, K> anyEmbedded( - Path> collection, Path target) { - - return new QuerydslAnyEmbeddedBuilder<>(getQueryMixin(), collection); - } - protected Mono createQuery() { QueryMetadata metadata = getQueryMixin().getMetadata(); - return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), + return createQuery(createReactiveFilter(metadata), metadata.getProjection(), metadata.getModifiers(), metadata.getOrderBy()); } @@ -160,7 +128,8 @@ protected Mono createQuery(Mono filter, @Nullable Expression { - BasicQuery basicQuery = new BasicQuery(it, createProjection(projection)); + Document fields = createProjection(projection); + BasicQuery basicQuery = new BasicQuery(it, fields == null ? new Document() : fields); Integer limit = modifiers.getLimitAsInteger(); Integer offset = modifiers.getOffsetAsInteger(); @@ -179,11 +148,11 @@ protected Mono createQuery(Mono filter, @Nullable Expression createFilter(QueryMetadata metadata) { + protected Mono createReactiveFilter(QueryMetadata metadata) { if (!metadata.getJoins().isEmpty()) { - return createJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it)) + return createReactiveJoinFilter(metadata).map(it -> ExpressionUtils.allOf(metadata.getWhere(), it)) .switchIfEmpty(Mono.justOrEmpty(metadata.getWhere())); } @@ -197,7 +166,7 @@ protected Mono createFilter(QueryMetadata metadata) { * @return */ @SuppressWarnings("unchecked") - protected Mono createJoinFilter(QueryMetadata metadata) { + protected Mono createReactiveJoinFilter(QueryMetadata metadata) { MultiValueMap, Mono> predicates = new LinkedMultiValueMap<>(); List joins = metadata.getJoins(); @@ -230,7 +199,7 @@ protected Mono createJoinFilter(QueryMetadata metadata) { Path source = (Path) ((Operation) joins.get(0).getTarget()).getArg(0); return allOf(predicates.get(source.getRoot())).onErrorResume(NoMatchException.class, - e -> 
Mono.just(ExpressionUtils.predicate(QuerydslMongoOps.NO_MATCH, e.source))); + e -> Mono.just(ExpressionUtils.predicate(MongodbOps.NO_MATCH, e.source))); } private Mono allOf(@Nullable Collection> predicates) { @@ -246,8 +215,8 @@ private Mono allOf(@Nullable Collection> predicates) */ protected Flux getIds(Class targetType, Mono condition) { - return condition.flatMapMany(it -> getIds(targetType, it)) - .switchIfEmpty(Flux.defer(() -> getIds(targetType, (Predicate) null))); + return condition.flatMapMany(it -> getJoinIds(targetType, it)) + .switchIfEmpty(Flux.defer(() -> getJoinIds(targetType, (Predicate) null))); } /** @@ -257,12 +226,18 @@ protected Flux getIds(Class targetType, Mono condition) { * @param condition must not be {@literal null}. * @return empty {@link List} if none found. */ - protected Flux getIds(Class targetType, @Nullable Predicate condition) { + protected Flux getJoinIds(Class targetType, @Nullable Predicate condition) { return createQuery(Mono.justOrEmpty(condition), null, QueryModifiers.EMPTY, Collections.emptyList()) .flatMapMany(query -> mongoOperations.findDistinct(query, "_id", targetType, Object.class)); } + @Override + protected List getIds(Class aClass, Predicate predicate) { + throw new UnsupportedOperationException( + "Use create Flux getIds(Class targetType, Mono condition)"); + } + /** * Marker exception to indicate no matches for a query using reference Id's. 
*/ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java index 8a153d0c2c..d62aa99c5e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuery.java @@ -15,7 +15,27 @@ */ package org.springframework.data.mongodb.repository.support; +import java.util.Collections; +import java.util.List; + +import org.bson.Document; + +import org.springframework.data.mongodb.core.ExecutableFindOperation; import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Nullable; + +import com.mysema.commons.lang.CloseableIterator; +import com.mysema.commons.lang.EmptyCloseableIterator; +import com.querydsl.core.Fetchable; +import com.querydsl.core.QueryMetadata; +import com.querydsl.core.QueryModifiers; +import com.querydsl.core.QueryResults; +import com.querydsl.core.types.Expression; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.core.types.Predicate; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * Spring Data specific simple {@link com.querydsl.core.Fetchable} {@link com.querydsl.core.SimpleQuery Query} @@ -25,7 +45,13 @@ * @author Mark Paluch * @author Christoph Strobl */ -public class SpringDataMongodbQuery extends QuerydslFetchableMongodbQuery> { +public class SpringDataMongodbQuery extends SpringDataMongodbQuerySupport> + implements Fetchable { + + private final Class entityClass; + private final String collection; + private final MongoOperations mongoOperations; + private final 
ExecutableFindOperation.FindWithProjection find; /** * Creates a new {@link SpringDataMongodbQuery}. @@ -33,7 +59,7 @@ public class SpringDataMongodbQuery extends QuerydslFetchableMongodbQuery type) { + public SpringDataMongodbQuery(MongoOperations operations, Class type) { this(operations, type, operations.getCollectionName(type)); } @@ -44,9 +70,174 @@ public SpringDataMongodbQuery(final MongoOperations operations, final Class type, + public SpringDataMongodbQuery(MongoOperations operations, Class type, String collectionName) { + this(new SpringDataMongodbSerializer(operations.getConverter()), operations, type, collectionName); + } + + private SpringDataMongodbQuery(MongodbDocumentSerializer serializer, MongoOperations operations, + Class type, String collectionName) { + + super(serializer); - super(new SpringDataMongodbSerializer(operations.getConverter()), type, collectionName, operations); + this.entityClass = (Class) type; + this.collection = collectionName; + this.mongoOperations = operations; + this.find = mongoOperations.query(this.entityClass).inCollection(collection); } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#iterable() + */ + @Override + public CloseableIterator iterate() { + + try { + org.springframework.data.util.CloseableIterator stream = mongoOperations.stream(createQuery(), + entityClass, collection); + + return new CloseableIterator() { + + @Override + public boolean hasNext() { + return stream.hasNext(); + } + + @Override + public T next() { + return stream.next(); + } + + @Override + public void remove() { + throw new UnsupportedOperationException("Cannot remove from iterator while streaming data."); + } + + @Override + public void close() { + stream.close(); + } + }; + } catch (RuntimeException e) { + return handleException(e, new EmptyCloseableIterator<>()); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetch() + */ + @Override + public List fetch() { + try { + return 
find.matching(createQuery()).all(); + } catch (RuntimeException e) { + return handleException(e, Collections.emptyList()); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchFirst() + */ + @Override + public T fetchFirst() { + try { + return find.matching(createQuery()).firstValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchOne() + */ + @Override + public T fetchOne() { + try { + return find.matching(createQuery()).oneValue(); + } catch (RuntimeException e) { + return handleException(e, null); + } + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchResults() + */ + @Override + public QueryResults fetchResults() { + + long total = fetchCount(); + return total > 0L ? new QueryResults<>(fetch(), getQueryMixin().getMetadata().getModifiers(), total) + : QueryResults.emptyResults(); + } + + /* + * (non-Javadoc) + * @see com.querydsl.core.Fetchable#fetchCount() + */ + @Override + public long fetchCount() { + try { + return find.matching(Query.of(createQuery()).skip(-1).limit(-1)).count(); + } catch (RuntimeException e) { + return handleException(e, 0L); + } + } + + protected org.springframework.data.mongodb.core.query.Query createQuery() { + + QueryMetadata metadata = getQueryMixin().getMetadata(); + + return createQuery(createFilter(metadata), metadata.getProjection(), metadata.getModifiers(), + metadata.getOrderBy()); + } + + protected org.springframework.data.mongodb.core.query.Query createQuery(@Nullable Predicate filter, + @Nullable Expression projection, QueryModifiers modifiers, List> orderBy) { + + Document fields = createProjection(projection); + BasicQuery basicQuery = new BasicQuery(createQuery(filter), fields == null ? 
new Document() : fields); + + Integer limit = modifiers.getLimitAsInteger(); + Integer offset = modifiers.getOffsetAsInteger(); + + if (limit != null) { + basicQuery.limit(limit); + } + if (offset != null) { + basicQuery.skip(offset); + } + if (orderBy.size() > 0) { + basicQuery.setSortObject(createSort(orderBy)); + } + + return basicQuery; + } + + /** + * Fetch the list of ids matching a given condition. + * + * @param targetType must not be {@literal null}. + * @param condition must not be {@literal null}. + * @return empty {@link List} if none found. + */ + protected List getIds(Class targetType, Predicate condition) { + + Query query = createQuery(condition, null, QueryModifiers.EMPTY, Collections.emptyList()); + return mongoOperations.findDistinct(query, "_id", targetType, Object.class); + } + + private static T handleException(RuntimeException e, T defaultValue) { + + if (e.getClass().getName().endsWith("$NoResults")) { + return defaultValue; + } + + throw e; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java new file mode 100644 index 0000000000..406019cf4d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java @@ -0,0 +1,147 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository.support; + +import java.util.List; + +import org.bson.Document; +import org.bson.codecs.DocumentCodec; +import org.bson.json.JsonMode; +import org.bson.json.JsonWriterSettings; + +import org.springframework.beans.DirectFieldAccessor; + +import com.mongodb.MongoClientSettings; +import com.querydsl.core.support.QueryMixin; +import com.querydsl.core.types.OrderSpecifier; +import com.querydsl.mongodb.document.AbstractMongodbQuery; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; + +/** + * Support query type to augment Spring Data-specific {@link #toString} representations and + * {@link org.springframework.data.domain.Sort} creation. + * + * @author Mark Paluch + * @since 3.3 + */ +abstract class SpringDataMongodbQuerySupport> + extends AbstractMongodbQuery { + + private final QueryMixin superQueryMixin; + + private static final JsonWriterSettings JSON_WRITER_SETTINGS = JsonWriterSettings.builder().outputMode(JsonMode.SHELL) + .build(); + + private final MongodbDocumentSerializer serializer; + + @SuppressWarnings("unchecked") + SpringDataMongodbQuerySupport(MongodbDocumentSerializer serializer) { + super(serializer); + this.serializer = serializer; + + DirectFieldAccessor fieldAccessor = new DirectFieldAccessor(this); + this.superQueryMixin = (QueryMixin) fieldAccessor.getPropertyValue("queryMixin"); + } + + /** + * Returns the {@literal Mongo Shell} representation of the query.
        + * The following query + * + *
        +	 *
        +	 * where(p.lastname.eq("Matthews")).orderBy(p.firstname.asc()).offset(1).limit(5);
        +	 * 
        + * + * results in + * + *
        +	 *
        +	 * find({"lastname" : "Matthews"}).sort({"firstname" : 1}).skip(1).limit(5)
        +	 * 
        + * + * Note that encoding to {@link String} may fail when using data types that cannot be encoded or DBRef's without an + * identifier. + * + * @return never {@literal null}. + */ + @Override + public String toString() { + + Document projection = createProjection(getQueryMixin().getMetadata().getProjection()); + Document sort = createSort(getQueryMixin().getMetadata().getOrderBy()); + DocumentCodec codec = new DocumentCodec(MongoClientSettings.getDefaultCodecRegistry()); + + StringBuilder sb = new StringBuilder("find(" + asDocument().toJson(JSON_WRITER_SETTINGS, codec)); + if (projection != null && projection.isEmpty()) { + sb.append(", ").append(projection.toJson(JSON_WRITER_SETTINGS, codec)); + } + sb.append(")"); + if (!sort.isEmpty()) { + sb.append(".sort(").append(sort.toJson(JSON_WRITER_SETTINGS, codec)).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getOffset() != null) { + sb.append(".skip(").append(getQueryMixin().getMetadata().getModifiers().getOffset()).append(")"); + } + if (getQueryMixin().getMetadata().getModifiers().getLimit() != null) { + sb.append(".limit(").append(getQueryMixin().getMetadata().getModifiers().getLimit()).append(")"); + } + return sb.toString(); + } + + /** + * Get the where definition as a Document instance + * + * @return + */ + public Document asDocument() { + return createQuery(getQueryMixin().getMetadata().getWhere()); + } + + /** + * Obtain the {@literal Mongo Shell} json query representation. + * + * @return never {@literal null}. + */ + public String toJson() { + return toJson(JSON_WRITER_SETTINGS); + } + + /** + * Obtain the json query representation applying given {@link JsonWriterSettings settings}. + * + * @param settings must not be {@literal null}. + * @return never {@literal null}. 
+ */ + public String toJson(JsonWriterSettings settings) { + return asDocument().toJson(settings); + } + + /** + * Compute the sort {@link Document} from the given list of {@link OrderSpecifier order specifiers}. + * + * @param orderSpecifiers can be {@literal null}. + * @return an empty {@link Document} if predicate is {@literal null}. + * @see MongodbDocumentSerializer#toSort(List) + */ + protected Document createSort(List> orderSpecifiers) { + return serializer.toSort(orderSpecifiers); + } + + // TODO: Remove once https://github.com/querydsl/querydsl/pull/2916 is merged + QueryMixin getQueryMixin() { + return superQueryMixin; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java index e18f30d96a..2453e1a46c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbSerializer.java @@ -22,6 +22,7 @@ import java.util.regex.Pattern; import org.bson.Document; + import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.QueryMapper; @@ -39,6 +40,7 @@ import com.querydsl.core.types.PathMetadata; import com.querydsl.core.types.PathType; import com.querydsl.mongodb.MongodbSerializer; +import com.querydsl.mongodb.document.MongodbDocumentSerializer; /** * Custom {@link MongodbSerializer} to take mapping information into account when building keys for constraints. 
diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java index 5584ae6e3b..0067eb3bf1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/SimpleReactiveMongoRepositoryTests.java @@ -28,8 +28,6 @@ import java.util.Arrays; -import javax.annotation.Nullable; - import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -49,6 +47,7 @@ import org.springframework.data.mongodb.repository.support.ReactiveMongoRepositoryFactory; import org.springframework.data.mongodb.repository.support.SimpleReactiveMongoRepository; import org.springframework.data.repository.query.ReactiveQueryMethodEvaluationContextProvider; +import org.springframework.lang.Nullable; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.util.ClassUtils; From d57c5a952984e39fda28d2807e7ac41ae2eb162f Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 15 Jun 2021 16:51:28 +0200 Subject: [PATCH 052/983] Add support for Wildcard Index. Add WildcardIndexed annotation and the programatic WildcardIndex. Closes #3225 Original pull request: #3671. 
--- .../data/mongodb/core/IndexConverters.java | 4 + .../data/mongodb/core/index/IndexField.java | 25 ++- .../data/mongodb/core/index/IndexInfo.java | 26 +++ .../MongoPersistentEntityIndexResolver.java | 106 ++++++++-- .../mongodb/core/index/WildcardIndex.java | 198 ++++++++++++++++++ .../mongodb/core/index/WildcardIndexed.java | 130 ++++++++++++ .../core/index/IndexInfoUnitTests.java | 11 + ...ersistentEntityIndexResolverUnitTests.java | 85 +++++++- src/main/asciidoc/reference/mapping.adoc | 88 ++++++++ 9 files changed, 654 insertions(+), 19 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java index e7fae4df5c..4d5349f7e7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -115,6 +115,10 @@ private static Converter getIndexDefinitionIndexO ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); } + if(indexOptions.containsKey("wildcardProjection")) { + ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); + } + return ops; }; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java index d0a1da68ea..7883da2270 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java @@ -29,7 +29,7 
@@ public final class IndexField { enum Type { - GEO, TEXT, DEFAULT, HASH; + GEO, TEXT, DEFAULT, HASH, WILDCARD; } private final String key; @@ -48,7 +48,7 @@ private IndexField(String key, @Nullable Direction direction, @Nullable Type typ if (Type.GEO.equals(type) || Type.TEXT.equals(type)) { Assert.isNull(direction, "Geo/Text indexes must not have a direction!"); } else { - if (!Type.HASH.equals(type)) { + if (!(Type.HASH.equals(type) || Type.WILDCARD.equals(type))) { Assert.notNull(direction, "Default indexes require a direction"); } } @@ -77,6 +77,17 @@ static IndexField hashed(String key) { return new IndexField(key, null, Type.HASH); } + /** + * Creates a {@literal wildcard} {@link IndexField} for the given key. + * + * @param key must not be {@literal null} or empty. + * @return new instance of {@link IndexField}. + * @since 3.3 + */ + static IndexField wildcard(String key) { + return new IndexField(key, null, Type.WILDCARD); + } + /** * Creates a geo {@link IndexField} for the given key. * @@ -142,6 +153,16 @@ public boolean isHashed() { return Type.HASH.equals(type); } + /** + * Returns whether the {@link IndexField} is contains a {@literal wildcard} expression. + * + * @return {@literal true} if {@link IndexField} contains a wildcard {@literal $**}. 
+ * @since 3.3 + */ + public boolean isWildcard() { + return Type.WILDCARD.equals(type); + } + /* * (non-Javadoc) * @see java.lang.Object#equals(java.lang.Object) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java index 7b507a8727..f8370b1bc6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java @@ -55,6 +55,7 @@ public class IndexInfo { private @Nullable Duration expireAfter; private @Nullable String partialFilterExpression; private @Nullable Document collation; + private @Nullable Document wildcardProjection; public IndexInfo(List indexFields, String name, boolean unique, boolean sparse, String language) { @@ -99,6 +100,8 @@ public static IndexInfo indexInfoOf(Document sourceDocument) { if (ObjectUtils.nullSafeEquals("hashed", value)) { indexFields.add(IndexField.hashed(key)); + } else if (key.contains("$**")) { + indexFields.add(IndexField.wildcard(key)); } else { Double keyValue = new Double(value.toString()); @@ -131,6 +134,10 @@ public static IndexInfo indexInfoOf(Document sourceDocument) { info.expireAfter = Duration.ofSeconds(NumberUtils.convertNumberToTargetClass(expireAfterSeconds, Long.class)); } + if (sourceDocument.containsKey("wildcardProjection")) { + info.wildcardProjection = sourceDocument.get("wildcardProjection", Document.class); + } + return info; } @@ -216,6 +223,16 @@ public Optional getCollation() { return Optional.ofNullable(collation); } + /** + * Get {@literal wildcardProjection} information. + * + * @return {@link Optional#empty() empty} if not set. + * @since 3.3 + */ + public Optional getWildcardProjection() { + return Optional.ofNullable(wildcardProjection); + } + /** * Get the duration after which documents within the index expire. 
* @@ -234,6 +251,14 @@ public boolean isHashed() { return getIndexFields().stream().anyMatch(IndexField::isHashed); } + /** + * @return {@literal true} if a wildcard index field is present. + * @since 3.3 + */ + public boolean isWildcard() { + return getIndexFields().stream().anyMatch(IndexField::isWildcard); + } + @Override public String toString() { @@ -303,4 +328,5 @@ public boolean equals(Object obj) { } return true; } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java index 2fc63fb36c..78f895e077 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java @@ -46,6 +46,7 @@ import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.mongodb.util.DotPath; import org.springframework.data.spel.EvaluationContextProvider; @@ -121,6 +122,7 @@ public List resolveIndexForEntity(MongoPersistentEntity indexInformation = new ArrayList<>(); String collection = root.getCollection(); indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions("", collection, root)); indexInformation.addAll(potentiallyCreateTextIndexDefinition(root, collection)); root.doWithProperties((PropertyHandler) property -> this @@ -162,17 +164,18 @@ private void 
potentiallyAddIndexForProperty(MongoPersistentEntity root, Mongo * @return List of {@link IndexDefinitionHolder} representing indexes for given type and its referenced property * types. Will never be {@code null}. */ - private List resolveIndexForClass( TypeInformation type, String dotPath, - Path path, String collection, CycleGuard guard) { + private List resolveIndexForClass(TypeInformation type, String dotPath, Path path, + String collection, CycleGuard guard) { return resolveIndexForEntity(mappingContext.getRequiredPersistentEntity(type), dotPath, path, collection, guard); } - private List resolveIndexForEntity(MongoPersistentEntity entity, String dotPath, - Path path, String collection, CycleGuard guard) { + private List resolveIndexForEntity(MongoPersistentEntity entity, String dotPath, Path path, + String collection, CycleGuard guard) { List indexInformation = new ArrayList<>(); indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions(dotPath, collection, entity)); + indexInformation.addAll(potentiallyCreateWildcardIndexDefinitions(dotPath, collection, entity)); entity.doWithProperties((PropertyHandler) property -> this .guardAndPotentiallyAddIndexForProperty(property, dotPath, path, collection, indexInformation, guard)); @@ -196,15 +199,15 @@ private void guardAndPotentiallyAddIndexForProperty(MongoPersistentProperty pers if (persistentProperty.isEntity()) { try { - indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), propertyDotPath.toString(), - propertyPath, collection, guard)); + indexes.addAll(resolveIndexForEntity(mappingContext.getPersistentEntity(persistentProperty), + propertyDotPath.toString(), propertyPath, collection, guard)); } catch (CyclicPropertyReferenceException e) { LOGGER.info(e.getMessage()); } } - List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), collection, - persistentProperty); + List indexDefinitions = 
createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, persistentProperty); if (!indexDefinitions.isEmpty()) { indexes.addAll(indexDefinitions); @@ -232,6 +235,11 @@ private List createIndexDefinitionHolderForProperty(Strin if (persistentProperty.isAnnotationPresent(HashIndexed.class)) { indices.add(createHashedIndexDefinition(dotPath, collection, persistentProperty)); } + if (persistentProperty.isAnnotationPresent(WildcardIndexed.class)) { + indices.add(createWildcardIndexDefinition(dotPath, collection, + persistentProperty.getRequiredAnnotation(WildcardIndexed.class), + mappingContext.getPersistentEntity(persistentProperty))); + } return indices; } @@ -246,6 +254,18 @@ private List potentiallyCreateCompoundIndexDefinitions(St return createCompoundIndexDefinitions(dotPath, collection, entity); } + private List potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection, + MongoPersistentEntity entity) { + + if (entity.findAnnotation(WildcardIndexed.class) == null) { + return Collections.emptyList(); + } + + return Collections.singletonList(new IndexDefinitionHolder(dotPath, + createWildcardIndexDefinition(dotPath, collection, entity.getRequiredAnnotation(WildcardIndexed.class), entity), + collection)); + } + private Collection potentiallyCreateTextIndexDefinition( MongoPersistentEntity root, String collection) { @@ -292,9 +312,8 @@ private Collection potentiallyCreateTextIndexDe } - private void appendTextIndexInformation(DotPath dotPath, Path path, - TextIndexDefinitionBuilder indexDefinitionBuilder, MongoPersistentEntity entity, - TextIndexIncludeOptions includeOptions, CycleGuard guard) { + private void appendTextIndexInformation(DotPath dotPath, Path path, TextIndexDefinitionBuilder indexDefinitionBuilder, + MongoPersistentEntity entity, TextIndexIncludeOptions includeOptions, CycleGuard guard) { entity.doWithProperties(new PropertyHandler() { @@ -311,8 +330,7 @@ public void 
doWithPersistentProperty(MongoPersistentProperty persistentProperty) if (includeOptions.isForce() || indexed != null || persistentProperty.isEntity()) { - DotPath propertyDotPath = dotPath - .append(persistentProperty.getFieldName()); + DotPath propertyDotPath = dotPath.append(persistentProperty.getFieldName()); Path propertyPath = path.append(persistentProperty); @@ -406,6 +424,32 @@ protected IndexDefinitionHolder createCompoundIndexDefinition(String dotPath, St return new IndexDefinitionHolder(dotPath, indexDefinition, collection); } + protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, String collection, + WildcardIndexed index, @Nullable MongoPersistentEntity entity) { + + WildcardIndex indexDefinition = new WildcardIndex(dotPath); + + if (StringUtils.hasText(index.wildcardProjection())) { + indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity)); + } + + if (!index.useGeneratedName()) { + indexDefinition.named(pathAwareIndexName(index.name(), dotPath, entity, null)); + } + + if (StringUtils.hasText(index.partialFilter())) { + indexDefinition.partial(evaluatePartialFilter(index.partialFilter(), entity)); + } + + if (StringUtils.hasText(index.collation())) { + indexDefinition.collation(evaluateCollation(index.collation(), entity)); + } else if (entity != null && entity.hasCollation()) { + indexDefinition.collation(entity.getCollation()); + } + + return new IndexDefinitionHolder(dotPath, indexDefinition, collection); + } + private org.bson.Document resolveCompoundIndexKeyFromStringDefinition(String dotPath, String keyDefinitionString, PersistentEntity entity) { @@ -510,6 +554,33 @@ private PartialIndexFilter evaluatePartialFilter(String filterExpression, Persis return PartialIndexFilter.of(BsonUtils.parse(filterExpression, null)); } + private org.bson.Document evaluateWildcardProjection(String projectionExpression, PersistentEntity entity) { + + Object result = evaluate(projectionExpression, 
getEvaluationContextForProperty(entity)); + + if (result instanceof org.bson.Document) { + return (org.bson.Document) result; + } + + return BsonUtils.parse(projectionExpression, null); + } + + private Collation evaluateCollation(String collationExpression, PersistentEntity entity) { + + Object result = evaluate(collationExpression, getEvaluationContextForProperty(entity)); + if (result instanceof org.bson.Document) { + return Collation.from((org.bson.Document) result); + } + if (result instanceof Collation) { + return (Collation) result; + } + if (result instanceof String) { + return Collation.parse(result.toString()); + } + throw new IllegalStateException("Cannot parse collation " + result); + + } + /** * Creates {@link HashedIndex} wrapped in {@link IndexDefinitionHolder} out of {@link HashIndexed} for a given * {@link MongoPersistentProperty}. @@ -657,8 +728,8 @@ private void resolveAndAddIndexesForAssociation(Association indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), collection, - property); + List indexDefinitions = createIndexDefinitionHolderForProperty(propertyDotPath.toString(), + collection, property); if (!indexDefinitions.isEmpty()) { indexes.addAll(indexDefinitions); @@ -998,6 +1069,11 @@ public org.bson.Document getIndexKeys() { public org.bson.Document getIndexOptions() { return indexDefinition.getIndexOptions(); } + + @Override + public String toString() { + return "IndexDefinitionHolder{" + "indexKeys=" + getIndexKeys() + '}'; + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java new file mode 100644 index 0000000000..ab1cda6183 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java @@ -0,0 +1,198 @@ +/* + * Copyright 2021 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.index; + +import java.time.Duration; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.bson.Document; +import org.springframework.lang.Nullable; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +/** + * {@link WildcardIndex} is a specific {@link Index} that can be used to include all fields into an index based on the + * {@code $**" : 1} pattern on a root object (the one typically carrying the + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation). On those it is possible to use + * {@link #wildcardProjectionInclude(String...)} and {@link #wildcardProjectionExclude(String...)} to define specific + * paths for in-/exclusion. + *

+ * It can also be used to define an index on a specific field path and its subfields, e.g.
+ * {@code "path.to.field.$**" : 1}. <br />
+ * Note that {@literal wildcardProjections} are not allowed in this case.
+ * <br />
+ * <strong>LIMITATIONS</strong><br />
+ * <ul>
+ * <li>{@link #unique() Unique} and {@link #expire(long) ttl} options are not supported.</li>
+ * <li>Keys used for sharding must not be included.</li>
+ * <li>Cannot be used to generate any type of geo index.</li>
+ * </ul>
        + * + * @author Christoph Strobl + * @see MongoDB Reference Documentation: Wildcard + * Indexes/ + * @since 3.3 + */ +public class WildcardIndex extends Index { + + private @Nullable String fieldName; + private Map wildcardProjection = new LinkedHashMap<>(); + + /** + * Create a new instance of {@link WildcardIndex} using {@code $**}. + */ + public WildcardIndex() {} + + /** + * Create a new instance of {@link WildcardIndex} for the given {@literal path}. If no {@literal path} is provided the + * index will be considered a root one using {@code $**}.
        + * NOTE {@link #wildcardProjectionInclude(String...)}, {@link #wildcardProjectionExclude(String...)} + * can only be used for top level index definitions having an {@literal empty} or {@literal null} path. + * + * @param path can be {@literal null}. If {@literal null} all fields will be indexed. + */ + public WildcardIndex(@Nullable String path) { + this.fieldName = path; + } + + /** + * Include the {@code _id} field in {@literal wildcardProjection}. + * + * @return this. + */ + public WildcardIndex includeId() { + + wildcardProjection.put("_id", 1); + return this; + } + + /** + * Set the index name to use. + * + * @param name + * @return this. + */ + @Override + public WildcardIndex named(String name) { + + super.named(name); + return this; + } + + /** + * Unique option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index unique() { + throw new UnsupportedOperationException("Wildcard Index does not support 'unique'."); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index expire(long seconds) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'."); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index expire(long value, TimeUnit timeUnit) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'."); + } + + /** + * ttl option is not supported. + * + * @throws UnsupportedOperationException + */ + @Override + public Index expire(Duration duration) { + throw new UnsupportedOperationException("Wildcard Index does not support 'ttl'."); + } + + /** + * Add fields to be included from indexing via {@code wildcardProjection}.
        + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionInclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 1); + } + return this; + } + + /** + * Add fields to be excluded from indexing via {@code wildcardProjection}.
        + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param paths must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjectionExclude(String... paths) { + + for (String path : paths) { + wildcardProjection.put(path, 0); + } + return this; + } + + /** + * Set the fields to be in-/excluded from indexing via {@code wildcardProjection}.
        + * This option is only allowed on {@link WildcardIndex#WildcardIndex() top level} wildcard indexes. + * + * @param includeExclude must not be {@literal null}. + * @return this. + */ + public WildcardIndex wildcardProjection(Map includeExclude) { + + wildcardProjection.putAll(includeExclude); + return this; + } + + private String getTargetFieldName() { + return StringUtils.hasText(fieldName) ? (fieldName + ".$**") : "$**"; + } + + @Override + public Document getIndexKeys() { + return new Document(getTargetFieldName(), 1); + } + + @Override + public Document getIndexOptions() { + + Document options = new Document(super.getIndexOptions()); + if (!CollectionUtils.isEmpty(wildcardProjection)) { + options.put("wildcardProjection", new Document(wildcardProjection)); + } + return options; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java new file mode 100644 index 0000000000..5f32aaf45c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java @@ -0,0 +1,130 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.index; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation for an entity or property that should be used as key for a + * Wildcard Index.
        + * If placed on a {@link ElementType#TYPE type} that is a root level domain entity (one having an + * {@link org.springframework.data.mongodb.core.mapping.Document} annotation) will advise the index creator to create a + * wildcard index for it. + * + *
        + *
        + * @Document
        + * @WildcardIndexed
        + * public class Product {
        + *     ...
        + * }
        + *
        + * db.product.createIndex({ "$**" : 1 } , {})
        + * 
        + * + * {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index. + * + *
        + *
        + * @Document
        + * @WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }")
        + * public class User {
        + *     private @Id String id;
        + *     private UserMetadata userMetadata;
        + * }
        + *
        + *
        + * db.user.createIndex(
        + *   { "$**" : 1 },
        + *   { "wildcardProjection" :
        + *     { "userMetadata.age" : 0 }
        + *   }
        + * )
        + * 
        + * + * Wildcard indexes can also be expressed by adding the annotation directly to the field. Please note that + * {@literal wildcardProjection} is not allowed on nested paths. + * + *
        + * @Document
        + * public class User {
        + * 
        + *     private @Id String id;
        + *
        + *     @WildcardIndexed
        + *     private UserMetadata userMetadata;
        + * }
        + *
        + *
        + * db.user.createIndex({ "userMetadata.$**" : 1 }, {})
        + * 
        + * + * @author Christoph Strobl + * @since 3.3 + */ +@Documented +@Target({ ElementType.TYPE, ElementType.FIELD }) +@Retention(RetentionPolicy.RUNTIME) +public @interface WildcardIndexed { + + /** + * Index name either as plain value or as {@link org.springframework.expression.spel.standard.SpelExpression template + * expression}.
        + *
        + * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the + * provided name will be prefixed with the path leading to the entity.
        + * + * @return + */ + String name() default ""; + + /** + * If set to {@literal true} then MongoDB will ignore the given index name and instead generate a new name. Defaults + * to {@literal false}. + * + * @return {@literal false} by default. + */ + boolean useGeneratedName() default false; + + /** + * Only index the documents in a collection that meet a specified {@link IndexFilter filter expression}.
+ * + * @return empty by default. + * @see https://docs.mongodb.com/manual/core/index-partial/ + */ + String partialFilter() default ""; + + /** + * Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) parsable} String. + *
        + * NOTE: Can only be done on root level documents. + * + * @return empty by default. + */ + String wildcardProjection() default ""; + + /** + * Defines the collation to apply. + * + * @return an empty {@link String} by default. + */ + String collation() default ""; +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java index 2026dfc644..3618e4c1f9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexInfoUnitTests.java @@ -36,6 +36,7 @@ public class IndexInfoUnitTests { static final String INDEX_WITH_PARTIAL_FILTER = "{ \"v\" : 2, \"key\" : { \"k3y\" : 1 }, \"name\" : \"partial-filter-index\", \"ns\" : \"db.collection\", \"partialFilterExpression\" : { \"quantity\" : { \"$gte\" : 10 } } }"; static final String INDEX_WITH_EXPIRATION_TIME = "{ \"v\" : 2, \"key\" : { \"lastModifiedDate\" : 1 },\"name\" : \"expire-after-last-modified\", \"ns\" : \"db.collectio\", \"expireAfterSeconds\" : 3600 }"; static final String HASHED_INDEX = "{ \"v\" : 2, \"key\" : { \"score\" : \"hashed\" }, \"name\" : \"score_hashed\", \"ns\" : \"db.collection\" }"; + static final String WILDCARD_INDEX = "{ \"v\" : 2, \"key\" : { \"$**\" : 1 }, \"name\" : \"$**_1\", \"wildcardProjection\" : { \"fieldA\" : 0, \"fieldB.fieldC\" : 0 } }"; @Test public void isIndexForFieldsCorrectly() { @@ -79,6 +80,16 @@ public void hashedIndexIsMarkedAsSuch() { assertThat(getIndexInfo(HASHED_INDEX).isHashed()).isTrue(); } + @Test // GH-3225 + public void identifiesWildcardIndexCorrectly() { + assertThat(getIndexInfo(WILDCARD_INDEX).isWildcard()).isTrue(); + } + + @Test // GH-3225 + public void readsWildcardIndexProjectionCorrectly() { + 
assertThat(getIndexInfo(WILDCARD_INDEX).getWildcardProjection()).contains(new Document("fieldA", 0).append("fieldB.fieldC", 0)); + } + private static IndexInfo getIndexInfo(String documentJson) { return IndexInfo.indexInfoOf(Document.parse(documentJson)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java index 489070548d..0a06561b67 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java @@ -15,8 +15,9 @@ */ package org.springframework.data.mongodb.core.index; +import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.*; -import static org.springframework.data.mongodb.test.util.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.assertThatExceptionOfType; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; @@ -25,6 +26,7 @@ import java.util.Arrays; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import org.junit.Test; import org.junit.runner.RunWith; @@ -1323,6 +1325,49 @@ public void errorsOnIndexOnEmbedded() { } + @Test // GH-3225 + public void resolvesWildcardOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("$**", 1); + }); + } + + @Test // GH-3225 + public void resolvesWildcardOnProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexOnProperty.class); + assertThat(indices).hasSize(3); 
+ assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + }); + assertThat(indices.get(1)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("the_field.$**", 1); + }); + assertThat(indices.get(2)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("withOptions.$**", 1); + assertThat(it.getIndexOptions()).containsEntry("name", + "withOptions.idx") + .containsEntry("collation", new org.bson.Document("locale", "en_US")) + .containsEntry("partialFilterExpression", new org.bson.Document("$eq", 1)); + }); + } + + @Test // GH-3225 + public void resolvesWildcardTypeOfNestedProperty() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardOnEntityOfNested.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + }); + } + @Document class MixedIndexRoot { @@ -1533,7 +1578,7 @@ class InvalidIndexOnUnwrapped { @Indexed // @Unwrapped.Nullable // - UnwrappableType unwrappableType; + UnwrappableType unwrappableType; } @@ -1573,6 +1618,42 @@ class WithHashedIndex { @HashIndexed String value; } + @Document + @WildcardIndexed + class WithWildCardIndexOnEntity { + + String value; + } + + @Document + @WildcardIndexed(wildcardProjection = "{'_id' : 1, 'value' : 0}") + class WithWildCardIndexHavingProjectionOnEntity { + + String value; + } + + @Document + class WithWildCardIndexOnProperty { + + @WildcardIndexed // + Map value; + + @WildcardIndexed // + @Field("the_field") // + Map renamedField; + + @WildcardIndexed(name = "idx", partialFilter = "{ '$eq' : 1 }", collation = "en_US") // + Map withOptions; + + } + + @Document + class WithWildCardOnEntityOfNested { + + WithWildCardIndexOnEntity value; + + } + @Document class WithHashedIndexAndIndex { diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index f08d03d3f0..7caf1093b9 100644 --- 
a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -760,6 +760,94 @@ mongoOperations.indexOpsFor(Jedi.class) ---- ==== +[[mapping-usage-indexes.wildcard-index]] +=== Wildcard Indexes + +A `WildcardIndex` is an index that can be used to include all fields or specific ones based a given (wildcard) pattern. +For details, refer to the https://docs.mongodb.com/manual/core/index-wildcard/[MongoDB Documentation]. + +The index can be set up programmatically using `WildcardIndex` via `IndexOperations`. + +.Programmatic WildcardIndex setup +==== +[source,java] +---- +mongoOperations + .indexOps(User.class) + .ensureIndex(new WildcardIndex("userMetadata")); +---- +[source,javascript] +---- +db.user.createIndex({ "userMetadata.$**" : 1 }, {}) +---- +==== + +The `@WildcardIndex` annotation allows a declarative index setup an can be added on either a type or property. + +If placed on a type that is a root level domain entity (one having an `@Document` annotation) will advise the index creator to create a +wildcard index for it. + +.Wildcard index on domain type +==== +[source,java] +---- +@Document +@WildcardIndexed +public class Product { + ... +} +---- +[source,javascript] +---- +db.product.createIndex({ "$**" : 1 },{}) +---- +==== + +The `wildcardProjection` can be used to specify keys to in-/exclude in the index. + +.Wildcard index with `wildcardProjection` +==== +[source,java] +---- +@Document +@WildcardIndexed(wildcardProjection = "{ 'userMetadata.age' : 0 }") +public class User { + private @Id String id; + private UserMetadata userMetadata; +} +---- +[source,javascript] +---- +db.user.createIndex( + { "$**" : 1 }, + { "wildcardProjection" : + { "userMetadata.age" : 0 } + } +) +---- +==== + +Wildcard indexes can also be expressed by adding the annotation directly to the field. +Please note that `wildcardProjection` is not allowed on nested paths. 
+ +.Wildcard index on property +==== +[source,java] +---- +@Document +public class User { + private @Id String id; + + @WildcardIndexed + private UserMetadata userMetadata; +} +---- +[source,javascript] +---- +db.user.createIndex({ "userMetadata.$**" : 1 }, {}) +---- +==== + [[mapping-usage-indexes.text-index]] === Text Indexes From f3b90c2b8abf760d503115f4569b5e57bd6beb8e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 14 Jul 2021 15:03:39 +0200 Subject: [PATCH 053/983] Polishing. Reformat code. Tweak javadoc. Reject wildcard projection usage on properties with a MappingException. Omit wildcard projections when declared on document types that are used as subdocument. See #3225 Original pull request: #3671. --- .../data/mongodb/core/IndexConverters.java | 2 +- .../data/mongodb/core/index/IndexField.java | 15 +++++++-- .../data/mongodb/core/index/IndexInfo.java | 2 +- .../MongoPersistentEntityIndexResolver.java | 24 ++++++++++++-- .../mongodb/core/index/WildcardIndex.java | 10 +++--- .../mongodb/core/index/WildcardIndexed.java | 14 ++++---- ...ersistentEntityIndexResolverUnitTests.java | 33 ++++++++++++++++++- src/main/asciidoc/new-features.adoc | 1 + src/main/asciidoc/reference/mapping.adoc | 9 ++--- 9 files changed, 87 insertions(+), 23 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java index 4d5349f7e7..db1fa0bf80 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -115,7 +115,7 @@ private static Converter getIndexDefinitionIndexO ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); } - if(indexOptions.containsKey("wildcardProjection")) { + if (indexOptions.containsKey("wildcardProjection")) { 
ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java index 7883da2270..843584b29d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexField.java @@ -29,7 +29,17 @@ public final class IndexField { enum Type { - GEO, TEXT, DEFAULT, HASH, WILDCARD; + GEO, TEXT, DEFAULT, + + /** + * @since 2.2 + */ + HASH, + + /** + * @since 3.3 + */ + WILDCARD; } private final String key; @@ -78,7 +88,8 @@ static IndexField hashed(String key) { } /** - * Creates a {@literal wildcard} {@link IndexField} for the given key. + * Creates a {@literal wildcard} {@link IndexField} for the given key. The {@code key} must follow the + * {@code fieldName.$**} notation. * * @param key must not be {@literal null} or empty. * @return new instance of {@link IndexField}. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java index f8370b1bc6..51b4aa48cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/IndexInfo.java @@ -100,7 +100,7 @@ public static IndexInfo indexInfoOf(Document sourceDocument) { if (ObjectUtils.nullSafeEquals("hashed", value)) { indexFields.add(IndexField.hashed(key)); - } else if (key.contains("$**")) { + } else if (key.endsWith("$**")) { indexFields.add(IndexField.wildcard(key)); } else { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java index 78f895e077..5fdb1cbc40 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolver.java @@ -119,6 +119,8 @@ public List resolveIndexForEntity(MongoPersistentEntity String .format("Entity %s is not a collection root. 
Make sure to annotate it with @Document!", root.getName())); + verifyWildcardIndexedProjection(root); + List indexInformation = new ArrayList<>(); String collection = root.getCollection(); indexInformation.addAll(potentiallyCreateCompoundIndexDefinitions("", collection, root)); @@ -133,6 +135,24 @@ public List resolveIndexForEntity(MongoPersistentEntity entity) { + + entity.doWithAll(it -> { + + if (it.isAnnotationPresent(WildcardIndexed.class)) { + + WildcardIndexed indexed = it.getRequiredAnnotation(WildcardIndexed.class); + + if (!ObjectUtils.isEmpty(indexed.wildcardProjection())) { + + throw new MappingException(String.format( + "WildcardIndexed.wildcardProjection cannot be used on nested paths. Offending property: %s.%s", + entity.getName(), it.getName())); + } + } + }); + } + private void potentiallyAddIndexForProperty(MongoPersistentEntity root, MongoPersistentProperty persistentProperty, List indexes, CycleGuard guard) { @@ -257,7 +277,7 @@ private List potentiallyCreateCompoundIndexDefinitions(St private List potentiallyCreateWildcardIndexDefinitions(String dotPath, String collection, MongoPersistentEntity entity) { - if (entity.findAnnotation(WildcardIndexed.class) == null) { + if (!entity.isAnnotationPresent(WildcardIndexed.class)) { return Collections.emptyList(); } @@ -429,7 +449,7 @@ protected IndexDefinitionHolder createWildcardIndexDefinition(String dotPath, St WildcardIndex indexDefinition = new WildcardIndex(dotPath); - if (StringUtils.hasText(index.wildcardProjection())) { + if (StringUtils.hasText(index.wildcardProjection()) && ObjectUtils.isEmpty(dotPath)) { indexDefinition.wildcardProjection(evaluateWildcardProjection(index.wildcardProjection(), entity)); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java index ab1cda6183..b07c3b1bc9 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndex.java @@ -51,7 +51,7 @@ public class WildcardIndex extends Index { private @Nullable String fieldName; - private Map wildcardProjection = new LinkedHashMap<>(); + private final Map wildcardProjection = new LinkedHashMap<>(); /** * Create a new instance of {@link WildcardIndex} using {@code $**}. @@ -97,7 +97,7 @@ public WildcardIndex named(String name) { /** * Unique option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. */ @Override public Index unique() { @@ -107,7 +107,7 @@ public Index unique() { /** * ttl option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. */ @Override public Index expire(long seconds) { @@ -117,7 +117,7 @@ public Index expire(long seconds) { /** * ttl option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. */ @Override public Index expire(long value, TimeUnit timeUnit) { @@ -127,7 +127,7 @@ public Index expire(long value, TimeUnit timeUnit) { /** * ttl option is not supported. * - * @throws UnsupportedOperationException + * @throws UnsupportedOperationException not supported for wildcard indexes. 
*/ @Override public Index expire(Duration duration) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java index 5f32aaf45c..d1b18e85bf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/index/WildcardIndexed.java @@ -38,7 +38,7 @@ * * db.product.createIndex({ "$**" : 1 } , {}) * - * + * * {@literal wildcardProjection} can be used to specify keys to in-/exclude in the index. * *
        @@ -65,7 +65,7 @@
          * 
          * @Document
          * public class User {
        - * 
        + *
          *     private @Id String id;
          *
          *     @WildcardIndexed
        @@ -89,9 +89,9 @@
         	 * expression}. 
        *
        * The name will only be applied as is when defined on root level. For usage on nested or embedded structures the - * provided name will be prefixed with the path leading to the entity.
        - * - * @return + * provided name will be prefixed with the path leading to the entity. + * + * @return empty by default. */ String name() default ""; @@ -115,8 +115,8 @@ /** * Explicitly specify sub fields to be in-/excluded as a {@link org.bson.Document#parse(String) prasable} String. *
        - * NOTE: Can only be done on root level documents. - * + * NOTE: Can only be applied on root level documents. + * * @return empty by default. */ String wildcardProjection() default ""; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java index 0a06561b67..30f6a9bfc5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/MongoPersistentEntityIndexResolverUnitTests.java @@ -32,10 +32,12 @@ import org.junit.runner.RunWith; import org.junit.runners.Suite; import org.junit.runners.Suite.SuiteClasses; + import org.springframework.core.annotation.AliasFor; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.annotation.Id; import org.springframework.data.geo.Point; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolver.IndexDefinitionHolder; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexResolverUnitTests.CompoundIndexResolutionTests; @@ -1333,6 +1335,20 @@ public void resolvesWildcardOnRoot() { assertThat(indices).hasSize(1); assertThat(indices.get(0)).satisfies(it -> { assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).isEmpty(); + }); + } + + @Test // GH-3225 + public void resolvesWildcardWithProjectionOnRoot() { + + List indices = prepareMappingContextAndResolveIndexForType( + WithWildCardIndexHavingProjectionOnEntity.class); + assertThat(indices).hasSize(1); + assertThat(indices.get(0)).satisfies(it -> { + 
assertThat(it.getIndexKeys()).containsEntry("$**", 1); + assertThat(it.getIndexOptions()).containsEntry("wildcardProjection", + org.bson.Document.parse("{'_id' : 1, 'value' : 0}")); }); } @@ -1365,6 +1381,15 @@ public void resolvesWildcardTypeOfNestedProperty() { assertThat(indices).hasSize(1); assertThat(indices.get(0)).satisfies(it -> { assertThat(it.getIndexKeys()).containsEntry("value.$**", 1); + assertThat(it.getIndexOptions()).hasSize(1).containsKey("name"); + }); + } + + @Test // GH-3225 + public void rejectsWildcardProjectionOnNestedPaths() { + + assertThatExceptionOfType(MappingException.class).isThrownBy(() -> { + prepareMappingContextAndResolveIndexForType(WildcardIndexedProjectionOnNestedPath.class); }); } @@ -1647,10 +1672,16 @@ class WithWildCardIndexOnProperty { } + @Document + class WildcardIndexedProjectionOnNestedPath { + + @WildcardIndexed(wildcardProjection = "{}") String foo; + } + @Document class WithWildCardOnEntityOfNested { - WithWildCardIndexOnEntity value; + WithWildCardIndexHavingProjectionOnEntity value; } diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index a74594bff0..74458b9971 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -6,6 +6,7 @@ * Extended support for <> entities. * Include/exclude `null` properties on write to `Document` through `@Field(write=…)`. +* Support for <>. [[new-features.3.2]] == What's New in Spring Data MongoDB 3.2 diff --git a/src/main/asciidoc/reference/mapping.adoc b/src/main/asciidoc/reference/mapping.adoc index 7caf1093b9..e301826697 100644 --- a/src/main/asciidoc/reference/mapping.adoc +++ b/src/main/asciidoc/reference/mapping.adoc @@ -782,9 +782,9 @@ db.user.createIndex({ "userMetadata.$**" : 1 }, {}) ---- ==== -The `@WildcardIndex` annotation allows a declarative index setup an can be added on either a type or property. 
+The `@WildcardIndex` annotation allows a declarative index setup that can used either with a document type or property. -If placed on a type that is a root level domain entity (one having an `@Document` annotation) will advise the index creator to create a +If placed on a type that is a root level domain entity (one annotated with `@Document`) , the index resolver will create a wildcard index for it. .Wildcard index on domain type @@ -794,7 +794,7 @@ wildcard index for it. @Document @WildcardIndexed public class Product { - ... + // … } ---- [source,javascript] @@ -828,7 +828,8 @@ db.user.createIndex( ==== Wildcard indexes can also be expressed by adding the annotation directly to the field. -Please note that `wildcardProjection` is not allowed on nested paths. +Please note that `wildcardProjection` is not allowed on nested paths such as properties. +Projections on types annotated with `@WildcardIndexed` are omitted during index creation. .Wildcard index on property ==== From 23177fef0c0fe163e44d42543f17a621969d6e5f Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 12 Jul 2021 10:32:13 +0200 Subject: [PATCH 054/983] Custom Converter should also be applicable for simple types. This commit fixes a regression that prevented custom converters from being applied to types considered store native ones. Original pull request: #3703. 
Fixes #3670 --- .../core/convert/MappingMongoConverter.java | 6 ++++- .../MappingMongoConverterUnitTests.java | 25 +++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index aced009cda..83be993b93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -1173,7 +1173,7 @@ protected Object getPotentiallyConvertedSimpleRead(Object value, TypeInformation @SuppressWarnings({ "rawtypes", "unchecked" }) private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class target) { - if (target == null || ClassUtils.isAssignableValue(target, value)) { + if (target == null) { return value; } @@ -1181,6 +1181,10 @@ private Object getPotentiallyConvertedSimpleRead(Object value, @Nullable Class) target, value.toString()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 369f6dbdef..759be5c6b3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -30,6 +30,7 @@ import java.time.temporal.ChronoUnit; import java.util.*; +import org.bson.types.Binary; import org.bson.types.Code; import org.bson.types.Decimal128; import org.bson.types.ObjectId; @@ -2568,6 +2569,21 @@ void readsMapContainingNullValue() { .containsEntry("item3", "i3"); } + @Test // GH-3670 + 
void appliesCustomConverterEvenToSimpleTypes() { + + converter = new MappingMongoConverter(resolver, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(it -> { + it.registerConverter(new MongoSimpleTypeConverter()); + })); + converter.afterPropertiesSet(); + + org.bson.Document source = new org.bson.Document("content", new Binary(new byte[] {0x00, 0x42})); + + GenericType target = converter.read(GenericType.class, source); + assertThat(target.content).isInstanceOf(byte[].class); + } + static class GenericType { T content; } @@ -3136,6 +3152,15 @@ public TypeImplementingMap convert(org.bson.Document source) { } } + @ReadingConverter + public static class MongoSimpleTypeConverter implements Converter { + + @Override + public byte[] convert(Binary source) { + return source.getData(); + } + } + static class TypeWrappingTypeImplementingMap { String id; From 3f27e8e152768d0ecff078190720ff59acdb7a70 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 12 Jul 2021 07:56:59 +0200 Subject: [PATCH 055/983] Fix raw document conversion in Collection like properties. Along the lines make sure to convert map like structures correctly if they do not come as a Document, eg. cause they got converted to a plain Map in a post load, pre convert event. Closes #3702 Original pull request: #3704. 
--- .../core/convert/MappingMongoConverter.java | 14 ++++++- .../MappingMongoConverterUnitTests.java | 39 +++++++++++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 83be993b93..9cb1f89797 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -2040,7 +2040,19 @@ public S convert(Object source, TypeInformation } if (typeHint.isMap()) { - return (S) mapConverter.convert(this, (Bson) source, typeHint); + + if(ClassUtils.isAssignable(Document.class, typeHint.getType())) { + return (S) documentConverter.convert(this, (Bson) source, typeHint); + } + + if(source instanceof Bson) { + return (S) mapConverter.convert(this, (Bson) source, typeHint); + } + if(source instanceof Map) { + return (S) mapConverter.convert(this, new Document((Map) source), typeHint); + } + + throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass())); } if (source instanceof DBRef) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index 759be5c6b3..dbc9e3d752 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -2584,6 +2584,38 @@ void appliesCustomConverterEvenToSimpleTypes() { assertThat(target.content).isInstanceOf(byte[].class); } + 
@Test // GH-3702 + void readsRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("raw", new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.raw).isInstanceOf(org.bson.Document.class).isEqualTo( new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3702 + void readsListOfRawDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("listOfRaw", Arrays.asList(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1)))); + + WithRawDocumentProperties target = converter.read(WithRawDocumentProperties.class, source); + + assertThat(target.listOfRaw) + .containsExactly(new org.bson.Document("simple", 1).append("document", new org.bson.Document("inner-doc", 1))); + } + + @Test // GH-3692 + void readsMapThatDoesNotComeAsDocument() { + + org.bson.Document source = new org.bson.Document("_id", "id-1").append("mapOfObjects", Collections.singletonMap("simple", 1)); + + ClassWithMapProperty target = converter.read(ClassWithMapProperty.class, source); + + assertThat(target.mapOfObjects).containsEntry("simple",1); + + } + static class GenericType { T content; } @@ -3243,6 +3275,13 @@ public Set> entrySet() { } } + static class WithRawDocumentProperties { + + String id; + org.bson.Document raw; + List listOfRaw; + } + static class WithFieldWrite { @org.springframework.data.mongodb.core.mapping.Field( From f38f6d67ab54b29454203946d55e2004dc26504d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 15 Jul 2021 09:59:46 +0200 Subject: [PATCH 056/983] Polishing. Support DBObject and Map that as source for entity materialization and map conversion. See #3702 Original pull request: #3704. 
--- .../core/convert/MappingMongoConverter.java | 13 +++--- .../data/mongodb/util/BsonUtils.java | 43 +++++++++++++++++++ .../MappingMongoConverterUnitTests.java | 5 ++- .../data/mongodb/util/json/BsonUtilsTest.java | 17 +++++++- 4 files changed, 67 insertions(+), 11 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 9cb1f89797..48505559c0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -2042,14 +2042,11 @@ public S convert(Object source, TypeInformation if (typeHint.isMap()) { if(ClassUtils.isAssignable(Document.class, typeHint.getType())) { - return (S) documentConverter.convert(this, (Bson) source, typeHint); + return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint); } - if(source instanceof Bson) { - return (S) mapConverter.convert(this, (Bson) source, typeHint); - } - if(source instanceof Map) { - return (S) mapConverter.convert(this, new Document((Map) source), typeHint); + if (BsonUtils.supportsBson(source)) { + return (S) mapConverter.convert(this, BsonUtils.asBson(source), typeHint); } throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass())); @@ -2064,8 +2061,8 @@ public S convert(Object source, TypeInformation String.format(INCOMPATIBLE_TYPES, source, BasicDBList.class, typeHint.getType(), getPath())); } - if (source instanceof Bson) { - return (S) documentConverter.convert(this, (Bson) source, typeHint); + if (BsonUtils.supportsBson(source)) { + return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint); } return (S) elementConverter.convert(source, typeHint); diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index 4d51af7dee..d452ad662f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -494,6 +494,49 @@ private static Map getAsMap(Object source) { return null; } + /** + * Returns the given source object as {@link Bson}, i.e. {@link Document}s and maps as is or throw + * {@link IllegalArgumentException}. + * + * @param source + * @return the converted/casted source object. + * @throws IllegalArgumentException if {@code source} cannot be converted/cast to {@link Bson}. + * @since 3.2.3 + * @see #supportsBson(Object) + */ + @SuppressWarnings("unchecked") + public static Bson asBson(Object source) { + + if (source instanceof Document) { + return (Document) source; + } + + if (source instanceof BasicDBObject) { + return (BasicDBObject) source; + } + + if (source instanceof DBObject) { + return new Document(((DBObject) source).toMap()); + } + + if (source instanceof Map) { + return new Document((Map) source); + } + + throw new IllegalArgumentException(String.format("Cannot convert %s to Bson", source)); + } + + /** + * Returns the given source can be used/converted as {@link Bson}. + * + * @param source + * @return {@literal true} if the given source can be converted to {@link Bson}. + * @since 3.2.3 + */ + public static boolean supportsBson(Object source) { + return source instanceof DBObject || source instanceof Map; + } + /** * Returns given object as {@link Collection}. 
Will return the {@link Collection} as is if the source is a * {@link Collection} already, will convert an array into a {@link Collection} or simply create a single element diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java index dbc9e3d752..ca94ac6e3c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterUnitTests.java @@ -934,10 +934,11 @@ void convertsSetToBasicDBList() { assertThat(readResult.iterator().next()).isInstanceOf(Address.class); } - @Test // DATAMONGO-402 + @Test // DATAMONGO-402, GH-3702 void readsMemberClassCorrectly() { - org.bson.Document document = new org.bson.Document("inner", new org.bson.Document("value", "FOO!")); + org.bson.Document document = new org.bson.Document("inner", + new LinkedHashMap<>(new org.bson.Document("value", "FOO!"))); Outer outer = converter.read(Outer.class, document); assertThat(outer.inner).isNotNull(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java index 8210dd9a6f..166932c237 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/util/json/BsonUtilsTest.java @@ -19,7 +19,7 @@ import java.util.ArrayList; import java.util.Collection; -import java.util.List; +import java.util.Collections; import org.bson.BsonDouble; import org.bson.BsonInt32; @@ -29,10 +29,16 @@ import org.bson.Document; import org.bson.types.ObjectId; import org.junit.jupiter.api.Test; + import 
org.springframework.data.mongodb.util.BsonUtils; +import com.mongodb.BasicDBList; + /** + * Unit tests for {@link BsonUtils}. + * * @author Christoph Strobl + * @author Mark Paluch */ class BsonUtilsTest { @@ -111,4 +117,13 @@ void asCollectionConvertsWrapsNonIterable() { assertThat((Collection)BsonUtils.asCollection(source)).containsExactly(source); } + + @Test // GH-3702 + void supportsBsonShouldReportIfConversionSupported() { + + assertThat(BsonUtils.supportsBson("foo")).isFalse(); + assertThat(BsonUtils.supportsBson(new Document())).isTrue(); + assertThat(BsonUtils.supportsBson(new BasicDBList())).isTrue(); + assertThat(BsonUtils.supportsBson(Collections.emptyMap())).isTrue(); + } } From bacbd7133e6d9e0f2969261110ad16ebfa0ff98a Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 15 Jul 2021 09:52:50 +0200 Subject: [PATCH 057/983] Add support for creating Time Series collection. Introduce time series to CollectionOptions and add dedicated TimeSeries annotation to derive values from. Closes #3731 Original pull request: #3732. 
--- .../data/mongodb/core/CollectionOptions.java | 144 ++++++++++++++++-- .../data/mongodb/core/EntityOperations.java | 72 +++++++++ .../data/mongodb/core/MongoTemplate.java | 28 +++- .../mongodb/core/ReactiveMongoTemplate.java | 17 ++- .../data/mongodb/core/mapping/TimeSeries.java | 86 +++++++++++ .../core/timeseries/Granularities.java | 45 ++++++ .../mongodb/core/timeseries/Granularity.java | 27 ++++ .../mongodb/core/MongoTemplateUnitTests.java | 54 +++++-- .../core/ReactiveMongoTemplateUnitTests.java | 47 ++++++ src/main/asciidoc/new-features.adoc | 1 + src/main/asciidoc/reference/mongodb.adoc | 1 + src/main/asciidoc/reference/time-series.adoc | 45 ++++++ 12 files changed, 547 insertions(+), 20 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java create mode 100644 src/main/asciidoc/reference/time-series.adoc diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index ca61d18d96..3e509e54f2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -17,8 +17,11 @@ import java.util.Optional; +import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import 
org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.util.Optionals; import org.springframework.lang.Nullable; @@ -42,6 +45,7 @@ public class CollectionOptions { private @Nullable Boolean capped; private @Nullable Collation collation; private ValidationOptions validationOptions; + private @Nullable TimeSeriesOptions timeSeriesOptions; /** * Constructs a new CollectionOptions instance. @@ -54,17 +58,19 @@ public class CollectionOptions { */ @Deprecated public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) { - this(size, maxDocuments, capped, null, ValidationOptions.none()); + this(size, maxDocuments, capped, null, ValidationOptions.none(), null); } private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped, - @Nullable Collation collation, ValidationOptions validationOptions) { + @Nullable Collation collation, ValidationOptions validationOptions, + @Nullable TimeSeriesOptions timeSeriesOptions) { this.maxDocuments = maxDocuments; this.size = size; this.capped = capped; this.collation = collation; this.validationOptions = validationOptions; + this.timeSeriesOptions = timeSeriesOptions; } /** @@ -78,7 +84,7 @@ public static CollectionOptions just(Collation collation) { Assert.notNull(collation, "Collation must not be null!"); - return new CollectionOptions(null, null, null, collation, ValidationOptions.none()); + return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null); } /** @@ -88,7 +94,21 @@ public static CollectionOptions just(Collation collation) { * @since 2.0 */ public static CollectionOptions empty() { - return new CollectionOptions(null, null, null, null, ValidationOptions.none()); + return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null); + } + + /** + * Quick way to set up {@link CollectionOptions} for a Time Series collection. 
For more advanced settings use + * {@link #timeSeries(TimeSeriesOptions)}. + * + * @param timeField The name of the property which contains the date in each time series document. Must not be + * {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @see #timeSeries(TimeSeriesOptions) + * @since 3.3 + */ + public static CollectionOptions timeSeries(String timeField) { + return empty().timeSeries(TimeSeriesOptions.timeSeries(timeField)); } /** @@ -99,7 +119,7 @@ public static CollectionOptions empty() { * @since 2.0 */ public CollectionOptions capped() { - return new CollectionOptions(size, maxDocuments, true, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, null); } /** @@ -110,7 +130,7 @@ public CollectionOptions capped() { * @since 2.0 */ public CollectionOptions maxDocuments(long maxDocuments) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -121,7 +141,7 @@ public CollectionOptions maxDocuments(long maxDocuments) { * @since 2.0 */ public CollectionOptions size(long size) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -132,7 +152,7 @@ public CollectionOptions size(long size) { * @since 2.0 */ public CollectionOptions collation(@Nullable Collation collation) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -252,7 +272,20 @@ public CollectionOptions schemaValidationAction(ValidationAction validationActio public CollectionOptions validation(ValidationOptions validationOptions) { 
Assert.notNull(validationOptions, "ValidationOptions must not be null!"); - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}. + * + * @param timeSeriesOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 3.3 + */ + public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) { + + Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null!"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions); } /** @@ -303,6 +336,16 @@ public Optional getValidationOptions() { return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions); } + /** + * Get the {@link TimeSeriesOptions} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 3.3 + */ + public Optional getTimeSeriesOptions() { + return Optional.ofNullable(timeSeriesOptions); + } + /** * Encapsulation of ValidationOptions options. * @@ -398,4 +441,87 @@ boolean isEmpty() { return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel()); } } + + /** + * Options applicable to Time Series collections. 
+ * + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/core/timeseries-collections + */ + public static class TimeSeriesOptions { + + private final String timeField; + + @Nullable // + private String metaField; + + private Granularity granularity; + + private TimeSeriesOptions(String timeField, @Nullable String metaField, Granularity granularity) { + + this.timeField = timeField; + this.metaField = metaField; + this.granularity = granularity; + } + + /** + * Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one, + * that contains the date in each time series document.
        + * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param timeField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public static TimeSeriesOptions timeSeries(String timeField) { + return new TimeSeriesOptions(timeField, null, Granularities.DEFAULT); + } + + /** + * Set the name of the field which contains metadata in each time series document. Should not be the {@literal id} + * nor the {@link TimeSeriesOptions#timeSeries(String) timeField} nor point to an {@literal array} or + * {@link java.util.Collection}.
        + * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param metaField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public TimeSeriesOptions metaField(String metaField) { + return new TimeSeriesOptions(timeField, metaField, granularity); + } + + /** + * Select the {@link Granularity} parameter to define how data in the time series collection is organized. Select + * one that is closest to the time span between incoming measurements. + * + * @return new instance of {@link TimeSeriesOptions}. + */ + public TimeSeriesOptions granularity(Granularity granularity) { + return new TimeSeriesOptions(timeField, metaField, granularity); + } + + /** + * @return never {@literal null}. + */ + public String getTimeField() { + return timeField; + } + + /** + * @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via + * {@link org.springframework.util.StringUtils#hasText(String)}. + */ + @Nullable + public String getMetaField() { + return metaField; + } + + /** + * @return never {@literal null}. 
+ */ + public Granularity getGranularity() { + return granularity; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java index f2daf0287d..9fb8836e1a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -29,19 +29,23 @@ import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; import org.springframework.data.mongodb.core.convert.MongoWriter; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * Common operations performed on an entity in the context of it's mapping metadata. 
@@ -778,6 +782,24 @@ interface TypedOperations { * @return */ Optional getCollation(Query query); + + /** + * Derive the applicable {@link CollectionOptions} for the given type. + * + * @return never {@literal null}. + * @since 3.3 + */ + CollectionOptions getCollectionOptions(); + + /** + * Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially + * annotated field names. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options); } /** @@ -817,6 +839,16 @@ public Optional getCollation(Query query) { return query.getCollation(); } + + @Override + public CollectionOptions getCollectionOptions() { + return CollectionOptions.empty(); + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) { + return options; + } } /** @@ -854,6 +886,46 @@ public Optional getCollation(Query query) { return Optional.ofNullable(entity.getCollation()); } + + @Override + public CollectionOptions getCollectionOptions() { + + CollectionOptions collectionOptions = CollectionOptions.empty(); + if (entity.hasCollation()) { + collectionOptions = collectionOptions.collation(entity.getCollation()); + } + + if (entity.isAnnotationPresent(TimeSeries.class)) { + + TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); + if (StringUtils.hasText(timeSeries.metaField())) { + options = options.metaField(timeSeries.metaField()); + } + if (!Granularities.DEFAULT.equals(timeSeries.granularity())) { + options = options.granularity(timeSeries.granularity()); + } + collectionOptions = collectionOptions.timeSeries(options); + } + + return collectionOptions; + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) { + + TimeSeriesOptions target = 
TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField())); + + if (StringUtils.hasText(source.getMetaField())) { + target = target.metaField(mappedNameOrDefault(source.getMetaField())); + } + return target.granularity(source.getGranularity()); + } + + private String mappedNameOrDefault(String name) { + MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name); + return persistentProperty != null ? persistentProperty.getFieldName() : name; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index eae4f42706..c833e511bf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -99,6 +99,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -597,7 +598,7 @@ public void setSessionSynchronization(SessionSynchronization sessionSynchronizat * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class) */ public MongoCollection createCollection(Class entityClass) { - return createCollection(entityClass, CollectionOptions.empty()); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } /* @@ -2435,6 +2436,19 @@ protected MongoCollection doCreateCollection(String collectionName, Do co.validationOptions(options); } + 
if(collectionOptions.containsKey("timeseries")) { + + Document timeSeries = collectionOptions.get("timeseries", Document.class); + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(timeSeries.getString("timeField")); + if(timeSeries.containsKey("metaField")) { + options.metaField(timeSeries.getString("metaField")); + } + if(timeSeries.containsKey("granularity")) { + options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); + } + co.timeSeriesOptions(options); + } + db.createCollection(collectionName, co); MongoCollection coll = db.getCollection(collectionName, Document.class); @@ -2589,6 +2603,18 @@ protected Document convertToDocument(@Nullable CollectionOptions collectionOptio collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() // .ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType)))); + + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { + + Document timeseries = new Document("timeField", it.getTimeField()); + if(StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if(!Granularities.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); } return doc; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 614894f3b6..2403e9a394 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -17,6 +17,7 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; +import 
org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuple2; @@ -665,7 +666,7 @@ public Mono createMono(String collectionName, ReactiveCollectionCallback< * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) */ public Mono> createCollection(Class entityClass) { - return createCollection(entityClass, CollectionOptions.empty()); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } /* @@ -2505,6 +2506,20 @@ protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Col result.validationOptions(validationOptions); }); + collectionOptions.getTimeSeriesOptions().map(operations.forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> { + + TimeSeriesOptions options = new TimeSeriesOptions(it.getTimeField()); + + if(StringUtils.hasText(it.getMetaField())) { + options.metaField(it.getMetaField()); + } + if(!Granularities.DEFAULT.equals(it.getGranularity())) { + options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); + } + + result.timeSeriesOptions(options); + }); + return result; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java new file mode 100644 index 0000000000..8a5fe255e0 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java @@ -0,0 +1,86 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.core.annotation.AliasFor; +import org.springframework.data.mongodb.core.timeseries.Granularities; + +/** + * Identifies a domain object to be persisted to a MongoDB Time Series collection. + * + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/core/timeseries-collections + */ +@Inherited +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE }) +@Document +public @interface TimeSeries { + + /** + * The collection the document representing the entity is supposed to be stored in. If not configured, a default + * collection name will be derived from the type's name. The attribute supports SpEL expressions to dynamically + * calculate the collection based on a per operation basis. + * + * @return the name of the collection to be used. + * @see Document#collection() + */ + @AliasFor(annotation = Document.class, attribute = "collection") + String collection() default ""; + + /** + * The name of the property which contains the date in each time series document.
        + * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @return never {@literal null}. + */ + String timeField(); + + /** + * The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor + * {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}.
        + * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @return empty {@link String} by default. + */ + String metaField() default ""; + + /** + * Select the {@link Granularities granularity} parameter to define how data in the time series collection is + * organized. + * + * @return {@link Granularities#DEFAULT server default} by default. + */ + Granularities granularity() default Granularities.DEFAULT; + + /** + * Defines the collation to apply when executing a query or creating indexes. + * + * @return an empty {@link String} by default. + * @see Document#collation() + */ + @AliasFor(annotation = Document.class, attribute = "collation") + String collation() default ""; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java new file mode 100644 index 0000000000..f4cac5232c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java @@ -0,0 +1,45 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.timeseries; + +/** + * {@link Granularity Granularities} available for Time Series data. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public enum Granularities implements Granularity { + + /** + * Server default value to indicate no explicit value should be sent. + */ + DEFAULT, + + /** + * High frequency ingestion. + */ + SECONDS, + + /** + * Medium frequency ingestion. + */ + MINUTES, + + /** + * Low frequency ingestion. + */ + HOURS +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java new file mode 100644 index 0000000000..c8fe496adb --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.timeseries; + +/** + * The Granularity of time series data that is closest to the time span between incoming measurements. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public interface Granularity { + + String name(); +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 68c83a2757..cc215c956c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -19,12 +19,14 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.*; +import com.mongodb.client.model.*; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import java.math.BigInteger; import java.time.Duration; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -82,6 +84,7 @@ import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.Sharded; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; @@ -98,6 +101,7 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -117,15 +121,6 @@ 
import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; -import com.mongodb.client.model.CountOptions; -import com.mongodb.client.model.CreateCollectionOptions; -import com.mongodb.client.model.DeleteOptions; -import com.mongodb.client.model.FindOneAndDeleteOptions; -import com.mongodb.client.model.FindOneAndReplaceOptions; -import com.mongodb.client.model.FindOneAndUpdateOptions; -import com.mongodb.client.model.MapReduceAction; -import com.mongodb.client.model.ReplaceOptions; -import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; @@ -2256,6 +2251,30 @@ void saveErrorsOnCollectionLikeObjects() { .isThrownBy(() -> template.save(new ArrayList<>(Arrays.asList(1, 2, 3)), "myList")); } + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + template.createCollection(TimeSeriesTypeWithDefaults.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + } + class AutogenerateableId { @Id BigInteger id; @@ -2358,6 +2377,23 @@ static class WithShardKeyPointingToNested { WithNamedFields nested; } + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { 
+ + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularities.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") + Instant timestamp; + Object meta; + } + static class TypeImplementingIterator implements Iterator { @Override diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index 5c5a307f1d..17fde7ec32 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -20,15 +20,21 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.assertThat; +import com.mongodb.client.model.TimeSeriesGranularity; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; +import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesType; +import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesTypeWithDefaults; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; import java.time.Duration; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -1426,6 +1432,30 @@ void insertErrorsOnPublisher() { .isThrownBy(() -> template.insert(publisher)); } + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeriesWithDefaults() { + + 
template.createCollection(TimeSeriesTypeWithDefaults.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("timestamp").toString()); + } + + @Test // GH-3731 + void createCollectionShouldSetUpTimeSeries() { + + template.createCollection(TimeSeriesType.class).subscribe(); + + ArgumentCaptor options = ArgumentCaptor.forClass(CreateCollectionOptions.class); + verify(db).createCollection(any(), options.capture()); + + assertThat(options.getValue().getTimeSeriesOptions().toString()) + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + } + private void stubFindSubscribe(Document document) { Publisher realPublisher = Flux.just(document); @@ -1483,6 +1513,23 @@ static class EntityWithListOfSimple { List grades; } + @TimeSeries(timeField = "timestamp") + static class TimeSeriesTypeWithDefaults { + + String id; + Instant timestamp; + } + + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularities.HOURS) + static class TimeSeriesType { + + String id; + + @Field("time_stamp") + Instant timestamp; + Object meta; + } + static class ValueCapturingEntityCallback { private final List values = new ArrayList<>(1); diff --git a/src/main/asciidoc/new-features.adoc b/src/main/asciidoc/new-features.adoc index 74458b9971..ddfa1e96ec 100644 --- a/src/main/asciidoc/new-features.adoc +++ b/src/main/asciidoc/new-features.adoc @@ -5,6 +5,7 @@ == What's New in Spring Data MongoDB 3.3 * Extended support for <> entities. +* Support for <> collections. * Include/exclude `null` properties on write to `Document` through `@Field(write=…)`. * Support for <>. 
diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index fb35bb655b..84afc7ea09 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -3382,3 +3382,4 @@ class GridFsClient { include::tailable-cursors.adoc[] include::change-streams.adoc[] +include::time-series.adoc[] diff --git a/src/main/asciidoc/reference/time-series.adoc b/src/main/asciidoc/reference/time-series.adoc new file mode 100644 index 0000000000..ac36e4026e --- /dev/null +++ b/src/main/asciidoc/reference/time-series.adoc @@ -0,0 +1,45 @@ +[[time-series]] +== Time Series + +MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections optimized to efficiently store sequences of measurements. +Those collections need to be actively created before inserting any data. This can be done by manually executing the command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below. + +.Create a Time Series Collection +==== +.Create a Time Series via the MongoDB Driver +[code, java] +---- +template.execute(db -> { + + com.mongodb.client.model.CreateCollectionOptions options = new CreateCollectionOptions(); + options.timeSeriesOptions(new TimeSeriesOptions("timestamp")); + + db.createCollection("weather", options); + return "OK"; +}); +---- + +.Create a Time Series Collection with CollectionOptions +[code, java] +---- +template.createCollection("weather", CollectionOptions.timeSeries("timestamp")); +---- + +.Create a Time Series Collection derived from an Annotation +[code, java] +---- +@TimeSeries(collection="weather", timeField = "timestamp") +public class Measurement { + + String id; + Instant timestamp; + // ... +} + +template.createCollection(Measurement.class); +---- +==== + +The snippets above can easily be transferred to the reactive API offering the very same methods. +Just make sure to _subscribe_. 
+ From f00991dc293dceee172b1ece6613dde599a0665d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 16 Jul 2021 09:41:16 +0200 Subject: [PATCH 058/983] Polishing. Rename Granularities/Granularity to Granularity and GranularityDefinition to proivide a more natural wording towards using predefined granularities. Validate presence of referenced properties through the TimeSeries annotation. Tweak Javadoc, reformat code, add unit tests. See #3731 Original pull request: #3732. --- .../data/mongodb/core/CollectionOptions.java | 24 ++++---- .../data/mongodb/core/EntityOperations.java | 16 ++++- .../data/mongodb/core/MongoTemplate.java | 58 +++++++++--------- .../mongodb/core/ReactiveMongoTemplate.java | 22 +++---- .../data/mongodb/core/mapping/TimeSeries.java | 17 +++--- .../mongodb/core/timeseries/Granularity.java | 24 +++++++- ...rities.java => GranularityDefinition.java} | 24 +------- .../core/EntityOperationsUnitTests.java | 60 +++++++++++++++++++ .../mongodb/core/MongoTemplateUnitTests.java | 26 +++++--- .../core/ReactiveMongoTemplateUnitTests.java | 24 ++++---- src/main/asciidoc/reference/time-series.adoc | 15 ++--- 11 files changed, 198 insertions(+), 112 deletions(-) rename spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/{Granularities.java => GranularityDefinition.java} (66%) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index 3e509e54f2..edff52bb74 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -20,8 +20,8 @@ import org.springframework.data.mongodb.core.mapping.Field; import 
org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; -import org.springframework.data.mongodb.core.timeseries.Granularities; import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.timeseries.GranularityDefinition; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.util.Optionals; import org.springframework.lang.Nullable; @@ -100,7 +100,7 @@ public static CollectionOptions empty() { /** * Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use * {@link #timeSeries(TimeSeriesOptions)}. - * + * * @param timeField The name of the property which contains the date in each time series document. Must not be * {@literal null}. * @return new instance of {@link CollectionOptions}. @@ -454,12 +454,13 @@ public static class TimeSeriesOptions { private final String timeField; - @Nullable // - private String metaField; + private @Nullable final String metaField; + + private final GranularityDefinition granularity; - private Granularity granularity; + private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity) { - private TimeSeriesOptions(String timeField, @Nullable String metaField, Granularity granularity) { + Assert.hasText(timeField, "Time field must not be empty or null!"); this.timeField = timeField; this.metaField = metaField; @@ -475,7 +476,7 @@ private TimeSeriesOptions(String timeField, @Nullable String metaField, Granular * @return new instance of {@link TimeSeriesOptions}. 
*/ public static TimeSeriesOptions timeSeries(String timeField) { - return new TimeSeriesOptions(timeField, null, Granularities.DEFAULT); + return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT); } /** @@ -492,12 +493,13 @@ public TimeSeriesOptions metaField(String metaField) { } /** - * Select the {@link Granularity} parameter to define how data in the time series collection is organized. Select - * one that is closest to the time span between incoming measurements. + * Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized. + * Select one that is closest to the time span between incoming measurements. * * @return new instance of {@link TimeSeriesOptions}. + * @see Granularity */ - public TimeSeriesOptions granularity(Granularity granularity) { + public TimeSeriesOptions granularity(GranularityDefinition granularity) { return new TimeSeriesOptions(timeField, metaField, granularity); } @@ -520,7 +522,7 @@ public String getMetaField() { /** * @return never {@literal null}. 
*/ - public Granularity getGranularity() { + public GranularityDefinition getGranularity() { return granularity; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java index 9fb8836e1a..3bba17aaef 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -38,7 +38,7 @@ import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -898,11 +898,23 @@ public CollectionOptions getCollectionOptions() { if (entity.isAnnotationPresent(TimeSeries.class)) { TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + + if (entity.getPersistentProperty(timeSeries.timeField()) == null) { + throw new MappingException(String.format("Time series field '%s' does not exist in type %s", + timeSeries.timeField(), entity.getName())); + } + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); if (StringUtils.hasText(timeSeries.metaField())) { + + if (entity.getPersistentProperty(timeSeries.metaField()) == null) { + throw new MappingException( + String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName())); + } + options = options.metaField(timeSeries.metaField()); } - if (!Granularities.DEFAULT.equals(timeSeries.granularity())) { + if (!Granularity.DEFAULT.equals(timeSeries.granularity())) { options = 
options.granularity(timeSeries.granularity()); } collectionOptions = collectionOptions.timeSeries(options); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index c833e511bf..fb0780c5c8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -99,7 +99,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -2436,14 +2436,15 @@ protected MongoCollection doCreateCollection(String collectionName, Do co.validationOptions(options); } - if(collectionOptions.containsKey("timeseries")) { + if (collectionOptions.containsKey("timeseries")) { Document timeSeries = collectionOptions.get("timeseries", Document.class); - com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions(timeSeries.getString("timeField")); - if(timeSeries.containsKey("metaField")) { + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions( + timeSeries.getString("timeField")); + if (timeSeries.containsKey("metaField")) { options.metaField(timeSeries.getString("metaField")); } - if(timeSeries.containsKey("granularity")) { + if (timeSeries.containsKey("granularity")) { 
options.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); } co.timeSeriesOptions(options); @@ -2604,17 +2605,18 @@ protected Document convertToDocument(@Nullable CollectionOptions collectionOptio collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() // .ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType)))); - collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions) + .ifPresent(it -> { - Document timeseries = new Document("timeField", it.getTimeField()); - if(StringUtils.hasText(it.getMetaField())) { - timeseries.append("metaField", it.getMetaField()); - } - if(!Granularities.DEFAULT.equals(it.getGranularity())) { - timeseries.append("granularity", it.getGranularity().name().toLowerCase()); - } - doc.put("timeseries", timeseries); - }); + Document timeseries = new Document("timeField", it.getTimeField()); + if (StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); } return doc; @@ -2849,9 +2851,9 @@ private void executeQueryInternal(CollectionCallback> col .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) .iterator()) { - while (cursor.hasNext()) { - callbackHandler.processDocument(cursor.next()); - } + while (cursor.hasNext()) { + callbackHandler.processDocument(cursor.next()); + } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -3175,17 +3177,17 @@ private class ReadDocumentCallback implements DocumentCallback { public T doWith(Document document) { - maybeEmitEvent(new 
AfterLoadEvent<>(document, type, collectionName)); - T entity = reader.read(type, document); + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); + T entity = reader.read(type, document); - if (entity == null) { - throw new MappingException(String.format("EntityReader %s returned null", reader)); - } + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); + } - maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); - entity = maybeCallAfterConvert(entity, document, collectionName); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + entity = maybeCallAfterConvert(entity, document, collectionName); - return entity; + return entity; } } @@ -3237,8 +3239,8 @@ public T doWith(Document document) { Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, entity) : entity; - maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName)); - return (T) maybeCallAfterConvert(result, document, collectionName); + maybeEmitEvent(new AfterConvertEvent<>(document, result, collectionName)); + return (T) maybeCallAfterConvert(result, document, collectionName); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 2403e9a394..82a3d12260 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -17,7 +17,6 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; -import org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuple2; @@ -111,6 +110,7 @@ import 
org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.data.projection.SpelAwareProxyProjectionFactory; @@ -975,7 +975,8 @@ public Flux aggregate(Aggregation aggregation, String collectionName, Cla return doAggregate(aggregation, collectionName, null, outputType); } - protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, Class outputType) { + protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType) { Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); Assert.hasText(collectionName, "Collection name must not be null or empty!"); @@ -987,19 +988,18 @@ protected Flux doAggregate(Aggregation aggregation, String collectionName AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName); + LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(ctx.getAggregationPipeline()), + collectionName); } ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); - return execute(collectionName, - collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), ctx.isOutOrMerge(), options, - readCallback, - ctx.getInputType())); + return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), + ctx.isOutOrMerge(), options, readCallback, ctx.getInputType())); } private Flux 
aggregateAndMap(MongoCollection collection, List pipeline, - boolean isOutOrMerge, - AggregationOptions options, ReadDocumentCallback readCallback, @Nullable Class inputType) { + boolean isOutOrMerge, AggregationOptions options, ReadDocumentCallback readCallback, + @Nullable Class inputType) { AggregatePublisher cursor = collection.aggregate(pipeline, Document.class) .allowDiskUse(options.isAllowDiskUse()); @@ -2510,10 +2510,10 @@ protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Col TimeSeriesOptions options = new TimeSeriesOptions(it.getTimeField()); - if(StringUtils.hasText(it.getMetaField())) { + if (StringUtils.hasText(it.getMetaField())) { options.metaField(it.getMetaField()); } - if(!Granularities.DEFAULT.equals(it.getGranularity())) { + if (!Granularity.DEFAULT.equals(it.getGranularity())) { options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java index 8a5fe255e0..d3f694f539 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/TimeSeries.java @@ -22,7 +22,7 @@ import java.lang.annotation.Target; import org.springframework.core.annotation.AliasFor; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; /** * Identifies a domain object to be persisted to a MongoDB Time Series collection. @@ -50,8 +50,9 @@ String collection() default ""; /** - * The name of the property which contains the date in each time series document.
        - * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * Name of the property which contains the date in each time series document.
        + * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. * * @return never {@literal null}. */ @@ -60,19 +61,19 @@ /** * The name of the field which contains metadata in each time series document. Should not be the {@literal id} nor * {@link #timeField()} nor point to an {@literal array} or {@link java.util.Collection}.
        - * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * Translation of property names to {@link Field#name() annotated fieldnames} will be considered during the mapping + * process. * * @return empty {@link String} by default. */ String metaField() default ""; /** - * Select the {@link Granularities granularity} parameter to define how data in the time series collection is - * organized. + * Select the {@link Granularity granularity} parameter to define how data in the time series collection is organized. * - * @return {@link Granularities#DEFAULT server default} by default. + * @return {@link Granularity#DEFAULT server default} by default. */ - Granularities granularity() default Granularities.DEFAULT; + Granularity granularity() default Granularity.DEFAULT; /** * Defines the collation to apply when executing a query or creating indexes. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java index c8fe496adb..30ae007fc6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularity.java @@ -16,12 +16,30 @@ package org.springframework.data.mongodb.core.timeseries; /** - * The Granularity of time series data that is closest to the time span between incoming measurements. + * {@link GranularityDefinition Granularities} available for Time Series data. * * @author Christoph Strobl * @since 3.3 */ -public interface Granularity { +public enum Granularity implements GranularityDefinition { - String name(); + /** + * Server default value to indicate no explicit value should be sent. + */ + DEFAULT, + + /** + * High frequency ingestion. + */ + SECONDS, + + /** + * Medium frequency ingestion. 
+ */ + MINUTES, + + /** + * Low frequency ingestion. + */ + HOURS } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java similarity index 66% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java index f4cac5232c..06f77cb594 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/Granularities.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/timeseries/GranularityDefinition.java @@ -16,30 +16,12 @@ package org.springframework.data.mongodb.core.timeseries; /** - * {@link Granularity Granularities} available for Time Series data. + * The Granularity of time series data that is closest to the time span between incoming measurements. * * @author Christoph Strobl * @since 3.3 */ -public enum Granularities implements Granularity { +public interface GranularityDefinition { - /** - * Server default value to indicate no explicit value should be sent. - */ - DEFAULT, - - /** - * High frequency ingestion. - */ - SECONDS, - - /** - * Medium frequency ingestion. - */ - MINUTES, - - /** - * Low frequency ingestion. - */ - HOURS + String name(); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java new file mode 100644 index 0000000000..901ac1f0dd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import java.time.Instant; + +import org.junit.jupiter.api.Test; + +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; + +/** + * Unit tests for {@link EntityOperations}. + * + * @author Mark Paluch + */ +class EntityOperationsUnitTests { + + EntityOperations operations = new EntityOperations(new MongoMappingContext()); + + @Test // GH-3731 + void shouldReportInvalidTimeField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidTimeField.class).getCollectionOptions()) + .withMessageContaining("Time series field 'foo' does not exist"); + } + + @Test // GH-3731 + void shouldReportInvalidMetaField() { + assertThatExceptionOfType(MappingException.class) + .isThrownBy(() -> operations.forType(InvalidMetaField.class).getCollectionOptions()) + .withMessageContaining("Meta field 'foo' does not exist"); + } + + @TimeSeries(timeField = "foo") + static class InvalidTimeField { + + } + + @TimeSeries(timeField = "time", metaField = "foo") + static class InvalidMetaField { + Instant time; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index cc215c956c..147d2e49c3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -19,7 +19,6 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.*; -import com.mongodb.client.model.*; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; @@ -101,7 +100,7 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.mongodb.core.timeseries.Granularities; +import org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -121,6 +120,16 @@ import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.MapReduceAction; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.UpdateResult; @@ -1982,7 +1991,8 @@ void shouldIncludeValueFromNestedShardKeyPath() { ArgumentCaptor filter = 
ArgumentCaptor.forClass(Bson.class); verify(collection).replaceOne(filter.capture(), any(), any()); - assertThat(filter.getValue()).isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname")); + assertThat(filter.getValue()) + .isEqualTo(new Document("_id", "id-1").append("value", "v1").append("nested.custom-named-field", "cname")); } @Test // DATAMONGO-2341 @@ -2272,7 +2282,8 @@ void createCollectionShouldSetUpTimeSeries() { verify(db).createCollection(any(), options.capture()); assertThat(options.getValue().getTimeSeriesOptions().toString()) - .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); } class AutogenerateableId { @@ -2370,7 +2381,7 @@ static class Sith { @Field("firstname") String name; } - @Sharded(shardKey = {"value", "nested.customName"}) + @Sharded(shardKey = { "value", "nested.customName" }) static class WithShardKeyPointingToNested { String id; String value; @@ -2384,13 +2395,12 @@ static class TimeSeriesTypeWithDefaults { Instant timestamp; } - @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularities.HOURS) + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) static class TimeSeriesType { String id; - @Field("time_stamp") - Instant timestamp; + @Field("time_stamp") Instant timestamp; Object meta; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java index 17fde7ec32..10e4f1cfcc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/ReactiveMongoTemplateUnitTests.java @@ -20,15 +20,9 @@ import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; import static org.springframework.data.mongodb.test.util.Assertions.assertThat; -import com.mongodb.client.model.TimeSeriesGranularity; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; -import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesType; -import org.springframework.data.mongodb.core.MongoTemplateUnitTests.TimeSeriesTypeWithDefaults; -import org.springframework.data.mongodb.core.convert.MongoCustomConversions; -import org.springframework.data.mongodb.core.mapping.TimeSeries; -import org.springframework.data.mongodb.core.timeseries.Granularities; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.test.StepVerifier; @@ -77,9 +71,11 @@ import org.springframework.data.mongodb.core.aggregation.Fields; import org.springframework.data.mongodb.core.aggregation.SetOperation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.TimeSeries; import org.springframework.data.mongodb.core.mapping.event.AbstractMongoEventListener; import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; @@ -93,6 +89,7 @@ import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import 
org.springframework.data.mongodb.core.timeseries.Granularity; import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -106,6 +103,7 @@ import com.mongodb.client.model.FindOneAndReplaceOptions; import com.mongodb.client.model.FindOneAndUpdateOptions; import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.TimeSeriesGranularity; import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.result.DeleteResult; import com.mongodb.client.result.InsertManyResult; @@ -951,7 +949,8 @@ void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForF @Test // DATAMONGO-2344, DATAMONGO-2572 void allowSecondaryReadsQueryOptionShouldApplyPrimaryPreferredReadPreferenceForFindDistinct() { - template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class).subscribe(); + template.findDistinct(new Query().allowSecondaryReads(), "name", AutogenerateableId.class, String.class) + .subscribe(); verify(collection).withReadPreference(eq(ReadPreference.primaryPreferred())); } @@ -1428,8 +1427,7 @@ void insertErrorsOnPublisher() { Publisher publisher = Mono.just("data"); - assertThatExceptionOfType(IllegalArgumentException.class) - .isThrownBy(() -> template.insert(publisher)); + assertThatExceptionOfType(IllegalArgumentException.class).isThrownBy(() -> template.insert(publisher)); } @Test // GH-3731 @@ -1453,7 +1451,8 @@ void createCollectionShouldSetUpTimeSeries() { verify(db).createCollection(any(), options.capture()); assertThat(options.getValue().getTimeSeriesOptions().toString()) - .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta").granularity(TimeSeriesGranularity.HOURS).toString()); + .isEqualTo(new com.mongodb.client.model.TimeSeriesOptions("time_stamp").metaField("meta") + .granularity(TimeSeriesGranularity.HOURS).toString()); } private void 
stubFindSubscribe(Document document) { @@ -1520,13 +1519,12 @@ static class TimeSeriesTypeWithDefaults { Instant timestamp; } - @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularities.HOURS) + @TimeSeries(timeField = "timestamp", metaField = "meta", granularity = Granularity.HOURS) static class TimeSeriesType { String id; - @Field("time_stamp") - Instant timestamp; + @Field("time_stamp") Instant timestamp; Object meta; } diff --git a/src/main/asciidoc/reference/time-series.adoc b/src/main/asciidoc/reference/time-series.adoc index ac36e4026e..54601a8ed1 100644 --- a/src/main/asciidoc/reference/time-series.adoc +++ b/src/main/asciidoc/reference/time-series.adoc @@ -1,13 +1,14 @@ [[time-series]] == Time Series -MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections optimized to efficiently store sequences of measurements. -Those collections need to be actively created before inserting any data. This can be done by manually executing the command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below. +MongoDB 5.0 introduced https://docs.mongodb.com/manual/core/timeseries-collections/[Time Series] collections that are optimized to efficiently store documents over time such as measurements or events. +Those collections need to be created as such before inserting any data. +Collections can be created by either running the `createCollection` command, defining time series collection options or extracting options from a `@TimeSeries` annotation as shown in the examples below. 
.Create a Time Series Collection ==== .Create a Time Series via the MongoDB Driver -[code, java] +[code,java] ---- template.execute(db -> { @@ -19,14 +20,14 @@ template.execute(db -> { }); ---- -.Create a Time Series Collection with CollectionOptions -[code, java] +.Create a Time Series Collection with `CollectionOptions` +[code,java] ---- template.createCollection("weather", CollectionOptions.timeSeries("timestamp")); ---- .Create a Time Series Collection derived from an Annotation -[code, java] +[code,java] ---- @TimeSeries(collection="weather", timeField = "timestamp") public class Measurement { @@ -41,5 +42,5 @@ template.createCollection(Measurement.class); ==== The snippets above can easily be transferred to the reactive API offering the very same methods. -Just make sure to _subscribe_. +Make sure to properly _subscribe_ to the returned publishers. From 9db9d16cf8348db8ab86e9edf6b48dd2ab367db3 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 10:18:49 +0200 Subject: [PATCH 059/983] Updated changelog. See #3681 --- src/main/resources/changelog.txt | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index 18ac26a430..929482612f 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,12 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.1.11 (2021-07-16) +-------------------------------------- +* #3689 - Fix Regression in generating queries with nested maps with numeric keys. +* #3688 - Multiple maps with numeric keys in a single update produces the wrong query (Regression). + + Changes in version 3.2.2 (2021-06-22) ------------------------------------- * #3677 - Add missing double quote to GeoJson.java JSDoc header. 
@@ -3464,5 +3470,6 @@ Repository + From e875f9ea334567408b01fb0233b5854fc035ee93 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:08:43 +0200 Subject: [PATCH 060/983] Updated changelog. See #3631 --- src/main/resources/changelog.txt | 35 ++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/src/main/resources/changelog.txt b/src/main/resources/changelog.txt index 929482612f..dff169e6fc 100644 --- a/src/main/resources/changelog.txt +++ b/src/main/resources/changelog.txt @@ -1,6 +1,40 @@ Spring Data MongoDB Changelog ============================= +Changes in version 3.3.0-M1 (2021-07-16) +---------------------------------------- +* #3731 - Add support for creating time series collection. +* #3706 - Upgrade to MongoDB 4.3.0 Drivers. +* #3705 - Adapt to changes in AssertJ 3.20. +* #3702 - `MappingMongoConverter` incorrectly processes an object property of type `org.bson.Document`. +* #3700 - Adapt to consolidated PersistentEntity API. +* #3693 - Upgrade to MongoDB 4.3.0-beta4 Drivers. +* #3689 - Fix Regression in generating queries with nested maps with numeric keys. +* #3688 - Multiple maps with numeric keys in a single update produces the wrong query (Regression). +* #3686 - reading a document with a list with a null element fails with Spring Data Mongo 3.2.2, works with 3.2.1. +* #3684 - Add equals and hashcode to UnwrappedMongoPersistentProperty (fixes #3683). +* #3683 - Memory Leak: instances of UnwrappedMongoPersistentProperty are accumulating in PreferredConstructor.isPropertyParameterCache. +* #3677 - Add missing double quote to GeoJson.java JSDoc header. +* #3674 - Upgrade to Querydsl 5.0. +* #3672 - Directly import JSR305 jar. +* #3670 - `Binary` not deserialized to `byte[]` for property of type `Object`. +* #3668 - Projection on the _id field returns wrong result when using `@MongoId` (MongoDB 4.4). +* #3666 - Documentation references outdated `Mongo` client. 
+* #3660 - MappingMongoConverter problem: ConversionContext#convert does not try to use custom converters first. +* #3659 - [3.2.1] Indexing Class with Custom Converter -> Couldn't find PersistentEntity for property private [...]. +* #3656 - Fix Build on Java 16. +* #3648 - Inconsistent nullability of read() in templates: not sure if mapper can skip a document. +* #3638 - Introduce template method for easier customization of fragments. +* #3635 - $floor isOrOrNor() return true. +* #3633 - NPE in QueryMapper when use Query with `null` as value. +* #3632 - Fix bullet points in aggregations framework asciidoc. +* #3603 - Update CI to Java 16. +* #3602 - Add support for flexible document references. +* #3543 - Aggregation query method should be able to return `Slice` and `Stream`. +* #3407 - Add an option to @Field annotation to control property write rules [DATAMONGO-2551]. +* #3225 - Add support for Wildcard Indexes [DATAMONGO-2368]. + + Changes in version 3.1.11 (2021-07-16) -------------------------------------- * #3689 - Fix Regression in generating queries with nested maps with numeric keys. @@ -3471,5 +3505,6 @@ Repository + From b6ad32d7d4dd6bd6433695425349c282337c77ee Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:08:59 +0200 Subject: [PATCH 061/983] Prepare 3.3 M1 (2021.1.0). 
See #3631 --- pom.xml | 8 ++++---- src/main/resources/notice.txt | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index b688f3ee50..382f4470c9 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-SNAPSHOT + 2.6.0-M1 @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-SNAPSHOT + 2.6.0-M1 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-libs-milestone + https://repo.spring.io/libs-milestone sonatype-libs-snapshot diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index 07cf1da6a0..624bf48c30 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,4 +1,4 @@ -Spring Data MongoDB 3.2 GA (2021.0.0) +Spring Data MongoDB 3.3 M1 (2021.1.0) Copyright (c) [2010-2019] Pivotal Software, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). @@ -27,3 +27,4 @@ conditions of the subcomponent's license, as noted in the LICENSE file. + From 4ef1ff6aff2deffb5daa3b8910c4f98ca789660f Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:09:25 +0200 Subject: [PATCH 062/983] Release version 3.3 M1 (2021.1.0). 
See #3631 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 382f4470c9..9a83ef35ef 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0033bd11d5..0557a133c7 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f62c8dc7f4..d7fbc46ed6 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 1f157e75bc..6bb6e8ca56 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M1 ../pom.xml From e7f3a2436d24262b587f0c4fc2aa8ae371e688d3 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:19:56 +0200 Subject: [PATCH 063/983] Prepare next development iteration. 
See #3631 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 9a83ef35ef..382f4470c9 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0557a133c7..0033bd11d5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index d7fbc46ed6..f62c8dc7f4 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 6bb6e8ca56..1f157e75bc 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M1 + 3.3.0-SNAPSHOT ../pom.xml From 4d7ee0e7415be66d59068d5dbfbbb595530acfaa Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Fri, 16 Jul 2021 14:19:58 +0200 Subject: [PATCH 064/983] After release cleanups. 
See #3631 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 382f4470c9..b688f3ee50 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-M1 + 2.6.0-SNAPSHOT @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-M1 + 2.6.0-SNAPSHOT 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-milestone - https://repo.spring.io/libs-milestone + spring-libs-snapshot + https://repo.spring.io/libs-snapshot sonatype-libs-snapshot From d2c9b47366aca3da4acbd0221f4c076fb81c0b61 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 09:03:10 +0200 Subject: [PATCH 065/983] Fix issues related to Querydsl 5.0 upgrade. Remove overridden methods no longer available in public api. Closes: #3738 --- .../support/QuerydslAbstractMongodbQuery.java | 10 ---------- .../support/SpringDataMongodbQuerySupport.java | 5 ----- 2 files changed, 15 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java index b255d20273..80c485ea3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslAbstractMongodbQuery.java @@ -177,16 +177,6 @@ protected Document createSort(List> orderSpecifiers) { return serializer.toSort(orderSpecifiers); } - /** - * Get the actual {@link QueryMixin} delegate. - * - * @return - */ - QueryMixin getQueryMixin() { - return queryMixin; - } - - /** * Returns the {@literal Mongo Shell} representation of the query.
        * The following query diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java index 406019cf4d..be9260df48 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SpringDataMongodbQuerySupport.java @@ -139,9 +139,4 @@ public String toJson(JsonWriterSettings settings) { protected Document createSort(List> orderSpecifiers) { return serializer.toSort(orderSpecifiers); } - - // TODO: Remove once https://github.com/querydsl/querydsl/pull/2916 is merged - QueryMixin getQueryMixin() { - return superQueryMixin; - } } From 68370c16fb288034f68b2ca2d38f5480e1656e2f Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 26 Jul 2021 14:14:31 +0200 Subject: [PATCH 066/983] =?UTF-8?q?Run=20unpaged=20query=20using=20Pageabl?= =?UTF-8?q?e.unpaged()=20through=20QuerydslMongoPredicateExecutor.findAll(?= =?UTF-8?q?=E2=80=A6).?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit We now correctly consider unpaged queries if the Pageable is unpaged. 
Closes: #3751 Original Pull Request: #3754 --- .../support/QuerydslMongoPredicateExecutor.java | 4 ++++ ...slMongoPredicateExecutorIntegrationTests.java | 16 ++++++++++++++++ 2 files changed, 20 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java index 95d2299670..d92d7ad129 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java @@ -212,6 +212,10 @@ private SpringDataMongodbQuery createQuery() { */ private SpringDataMongodbQuery applyPagination(SpringDataMongodbQuery query, Pageable pageable) { + if (pageable.isUnpaged()) { + return query; + } + query = query.offset(pageable.getOffset()).limit(pageable.getPageSize()); return applySorting(query, pageable.getSort()); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java index 6b46618fdb..782e46b134 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutorIntegrationTests.java @@ -27,6 +27,8 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.dao.PermissionDeniedDataAccessException; +import org.springframework.data.domain.PageRequest; 
+import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.MongoDatabaseFactory; @@ -122,6 +124,20 @@ public void findUsingAndShouldWork() { .containsExactly(dave); } + @Test // GH-3751 + public void findPage() { + + assertThat(repository + .findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())), + PageRequest.of(0, 10)) + .getContent()).containsExactly(dave); + + assertThat(repository + .findAll(person.lastname.startsWith(oliver.getLastname()).and(person.firstname.startsWith(dave.getFirstname())), + Pageable.unpaged()) + .getContent()).containsExactly(dave); + } + @Test // DATAMONGO-362, DATAMONGO-1848 public void springDataMongodbQueryShouldAllowJoinOnDBref() { From 45971b212c12c67e4233d1b139de06ba088e18ee Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 26 Jul 2021 14:15:24 +0200 Subject: [PATCH 067/983] Polishing. Move off deprecated classes. Add unpaged testcase for query by example. 
Original Pull Request: #3754 --- .../support/QuerydslMongoPredicateExecutor.java | 2 +- .../support/SimpleMongoRepository.java | 2 +- .../support/SimpleMongoRepositoryTests.java | 16 +++++++++++++++- 3 files changed, 17 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java index d92d7ad129..569273afb5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/QuerydslMongoPredicateExecutor.java @@ -27,7 +27,7 @@ import org.springframework.data.querydsl.EntityPathResolver; import org.springframework.data.querydsl.QuerydslPredicateExecutor; import org.springframework.data.querydsl.SimpleEntityPathResolver; -import org.springframework.data.repository.support.PageableExecutionUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.util.Assert; import com.querydsl.core.NonUniqueResultException; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java index 4ffba8a6a3..1443474b8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepository.java @@ -36,7 +36,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.repository.MongoRepository; import org.springframework.data.mongodb.repository.query.MongoEntityInformation; -import 
org.springframework.data.repository.support.PageableExecutionUtils; +import org.springframework.data.support.PageableExecutionUtils; import org.springframework.data.util.StreamUtils; import org.springframework.data.util.Streamable; import org.springframework.lang.Nullable; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java index 135b6b3888..61cd78ea93 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/support/SimpleMongoRepositoryTests.java @@ -30,10 +30,11 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; + import org.springframework.data.domain.Example; import org.springframework.data.domain.Page; import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.ExampleMatcher.*; +import org.springframework.data.domain.Pageable; import org.springframework.data.geo.Point; import org.springframework.data.mongodb.MongoTransactionManager; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; @@ -177,6 +178,19 @@ void findByExampleShouldLookUpEntriesCorrectly() { assertThat(result.getTotalPages()).isEqualTo(1); } + @Test // GH-3751 + void findByExampleShouldReturnUnpagedResults() { + + Person sample = new Person(); + sample.setLastname("Matthews"); + trimDomainType(sample, "id", "createdAt", "email"); + + Page result = repository.findAll(Example.of(sample), Pageable.unpaged()); + + assertThat(result.getContent()).hasSize(2).contains(dave, oliver); + assertThat(result.getTotalPages()).isEqualTo(1); + } + @Test // DATAMONGO-1464 void findByExampleMultiplePagesShouldLookUpEntriesCorrectly() { From 
454afd9877b6a7d2c164461f766a294cd907141f Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:02:56 +0200 Subject: [PATCH 068/983] Prepare 3.3 M2 (2021.1.0). See #3736 --- pom.xml | 8 ++++---- src/main/resources/notice.txt | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index b688f3ee50..b6477961a1 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-SNAPSHOT + 2.6.0-M2 @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-SNAPSHOT + 2.6.0-M2 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-libs-milestone + https://repo.spring.io/libs-milestone sonatype-libs-snapshot diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index 624bf48c30..29628c3570 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,4 +1,4 @@ -Spring Data MongoDB 3.3 M1 (2021.1.0) +Spring Data MongoDB 3.3 M2 (2021.1.0) Copyright (c) [2010-2019] Pivotal Software, Inc. This product is licensed to you under the Apache License, Version 2.0 (the "License"). @@ -27,4 +27,5 @@ conditions of the subcomponent's license, as noted in the LICENSE file. + From 87ab1ac48ceaec1f014a6f50148cb6b4d9c20544 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:03:17 +0200 Subject: [PATCH 069/983] Release version 3.3 M2 (2021.1.0). 
See #3736 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index b6477961a1..ff938bcceb 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0033bd11d5..31260b443f 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f62c8dc7f4..8e0bf96e21 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 1f157e75bc..42bf7aa081 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M2 ../pom.xml From 828c07416794d75c8e41753a53e1c2b3d96ac565 Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:16:21 +0200 Subject: [PATCH 070/983] Prepare next development iteration. 
See #3736 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index ff938bcceb..b6477961a1 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 31260b443f..0033bd11d5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index 8e0bf96e21..f62c8dc7f4 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 42bf7aa081..1f157e75bc 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M2 + 3.3.0-SNAPSHOT ../pom.xml From 7538b1a1a57a6fb4d1a2cfc54fda095a5d29ee8e Mon Sep 17 00:00:00 2001 From: Jens Schauder Date: Thu, 12 Aug 2021 15:16:23 +0200 Subject: [PATCH 071/983] After release cleanups. 
See #3736 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index b6477961a1..b688f3ee50 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-M2 + 2.6.0-SNAPSHOT @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-M2 + 2.6.0-SNAPSHOT 4.3.0 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-milestone - https://repo.spring.io/libs-milestone + spring-libs-snapshot + https://repo.spring.io/libs-snapshot sonatype-libs-snapshot From 1d943d62a370267244cdabbb04160418532671fe Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 13:10:15 +0200 Subject: [PATCH 072/983] Fix build on Java 16. Make sure to use an initialized MappingContext. Closes: #3749 Original pull request: #3752. --- .../core/EntityOperationsUnitTests.java | 3 +- .../data/mongodb/core/MongoTemplateTests.java | 2 +- .../test/util/MappingContextConfigurer.java | 50 ++++++++++++ .../test/util/MongoConverterConfigurer.java | 40 ++++++++++ .../test/util/MongoTestMappingContext.java | 78 +++++++++++++++++++ .../mongodb/test/util/MongoTestTemplate.java | 2 +- .../util/MongoTestTemplateConfiguration.java | 51 +----------- .../test/util/ReactiveMongoTestTemplate.java | 2 +- 8 files changed, 174 insertions(+), 54 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java index 901ac1f0dd..160a598bc7 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/EntityOperationsUnitTests.java @@ -24,6 +24,7 @@ import org.springframework.data.mapping.MappingException; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.test.util.MongoTestMappingContext; /** * Unit tests for {@link EntityOperations}. @@ -32,7 +33,7 @@ */ class EntityOperationsUnitTests { - EntityOperations operations = new EntityOperations(new MongoMappingContext()); + EntityOperations operations = new EntityOperations(MongoTestMappingContext.newTestContext()); @Test // GH-3731 void shouldReportInvalidTimeField() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index f5521008f8..28cdaa4830 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -138,7 +138,7 @@ public class MongoTemplateTests { cfg.configureMappingContext(it -> { it.autocreateIndex(false); - it.intitalEntitySet(AuditablePerson.class); + it.initialEntitySet(AuditablePerson.class); }); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java new file mode 100644 index 0000000000..75169f5e45 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MappingContextConfigurer.java @@ -0,0 +1,50 @@ +/* + * Copyright 2021 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.Set; + +import org.springframework.lang.Nullable; + +/** + * Utility to configure {@link org.springframework.data.mongodb.core.mapping.MongoMappingContext} properties. + * + * @author Christoph Strobl + */ +public class MappingContextConfigurer { + + private @Nullable Set> intitalEntitySet; + boolean autocreateIndex = false; + + public void autocreateIndex(boolean autocreateIndex) { + this.autocreateIndex = autocreateIndex; + } + + public void initialEntitySet(Set> initialEntitySet) { + this.intitalEntitySet = initialEntitySet; + } + + public void initialEntitySet(Class... initialEntitySet) { + this.intitalEntitySet = new HashSet<>(Arrays.asList(initialEntitySet)); + } + + Set> initialEntitySet() { + return intitalEntitySet != null ? 
intitalEntitySet : Collections.emptySet(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java new file mode 100644 index 0000000000..7129d9951a --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoConverterConfigurer.java @@ -0,0 +1,40 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.test.util; + +import java.util.Arrays; + +import org.springframework.core.convert.converter.Converter; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; + +/** + * Utility to configure {@link MongoCustomConversions}. + * + * @author Christoph Strobl + */ +public class MongoConverterConfigurer { + + CustomConversions customConversions; + + public void customConversions(CustomConversions customConversions) { + this.customConversions = customConversions; + } + + public void customConverters(Converter... 
converters) { + customConversions(new MongoCustomConversions(Arrays.asList(converters))); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java new file mode 100644 index 0000000000..f9701d24aa --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestMappingContext.java @@ -0,0 +1,78 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.test.util; + +import java.util.Collections; +import java.util.function.Consumer; + +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; + +/** + * @author Christoph Strobl + */ +public class MongoTestMappingContext extends MongoMappingContext { + + private MappingContextConfigurer contextConfigurer; + private MongoConverterConfigurer converterConfigurer; + + public static MongoTestMappingContext newTestContext() { + return new MongoTestMappingContext(conig -> {}).init(); + } + + public MongoTestMappingContext(MappingContextConfigurer contextConfig) { + + this.contextConfigurer = contextConfig; + this.converterConfigurer = new MongoConverterConfigurer(); + } + + public MongoTestMappingContext(Consumer contextConfig) { + + this(new MappingContextConfigurer()); + contextConfig.accept(contextConfigurer); + } + + public MongoTestMappingContext customConversions(MongoConverterConfigurer converterConfig) { + + this.converterConfigurer = converterConfig; + return this; + } + + public MongoTestMappingContext customConversions(Consumer converterConfig) { + + converterConfig.accept(converterConfigurer); + return this; + } + + public MongoTestMappingContext init() { + + setInitialEntitySet(contextConfigurer.initialEntitySet()); + setAutoIndexCreation(contextConfigurer.autocreateIndex); + if (converterConfigurer.customConversions != null) { + setSimpleTypeHolder(converterConfigurer.customConversions.getSimpleTypeHolder()); + } else { + setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); + } + + super.afterPropertiesSet(); + return this; + } + + @Override + public void afterPropertiesSet() { + init(); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java index ff1363965d..c612319e55 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplate.java @@ -50,7 +50,7 @@ public MongoTestTemplate(MongoClient client, String database, Class... initia cfg.configureMappingContext(it -> { it.autocreateIndex(false); - it.intitalEntitySet(initialEntities); + it.initialEntitySet(initialEntities); }); }); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java index ee75da8b19..b50ff88133 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestTemplateConfiguration.java @@ -16,20 +16,15 @@ package org.springframework.data.mongodb.test.util; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; -import java.util.HashSet; import java.util.List; -import java.util.Set; import java.util.function.Consumer; import java.util.function.Function; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationListener; import org.springframework.context.ConfigurableApplicationContext; -import org.springframework.core.convert.converter.Converter; import org.springframework.data.auditing.IsNewAwareAuditingHandler; -import org.springframework.data.convert.CustomConversions; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; @@ -115,16 +110,7 @@ 
ApplicationContext getApplicationContext() { MongoMappingContext mappingContext() { if (mappingContext == null) { - - mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(mappingContextConfigurer.initialEntitySet()); - mappingContext.setAutoIndexCreation(mappingContextConfigurer.autocreateIndex); - if(mongoConverterConfigurer.customConversions != null) { - mappingContext.setSimpleTypeHolder(mongoConverterConfigurer.customConversions.getSimpleTypeHolder()); - } else { - mappingContext.setSimpleTypeHolder(new MongoCustomConversions(Collections.emptyList()).getSimpleTypeHolder()); - } - mappingContext.afterPropertiesSet(); + mappingContext = new MongoTestMappingContext(mappingContextConfigurer).customConversions(mongoConverterConfigurer).init(); } return mappingContext; @@ -222,41 +208,6 @@ public void defaultDb(String defaultDatabase) { } } - public static class MongoConverterConfigurer { - - CustomConversions customConversions; - - public void customConversions(CustomConversions customConversions) { - this.customConversions = customConversions; - } - - public void customConverters(Converter... converters) { - customConversions(new MongoCustomConversions(Arrays.asList(converters))); - } - } - - public static class MappingContextConfigurer { - - Set> intitalEntitySet; - boolean autocreateIndex = false; - - public void autocreateIndex(boolean autocreateIndex) { - this.autocreateIndex = autocreateIndex; - } - - public void intitalEntitySet(Set> intitalEntitySet) { - this.intitalEntitySet = intitalEntitySet; - } - - public void intitalEntitySet(Class... initialEntitySet) { - this.intitalEntitySet = new HashSet<>(Arrays.asList(initialEntitySet)); - } - - Set> initialEntitySet() { - return intitalEntitySet != null ? 
intitalEntitySet : Collections.emptySet(); - } - } - public static class AuditingConfigurer { Function auditingHandlerFunction; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java index 9e7d2bbbfa..774493322e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/ReactiveMongoTestTemplate.java @@ -56,7 +56,7 @@ public ReactiveMongoTestTemplate(MongoClient client, String database, Class.. cfg.configureMappingContext(it -> { it.autocreateIndex(false); - it.intitalEntitySet(initialEntities); + it.initialEntitySet(initialEntities); }); }); } From 255491c4468898be440cf51e727c53669e6de9ef Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 23 Aug 2021 09:31:25 +0200 Subject: [PATCH 073/983] Upgrade to MongoDB 4.3.1 Drivers. Closes: #3778 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index b688f3ee50..5d28c8a5c5 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.3.0 + 4.3.1 ${mongo} 1.19 From 23254c10dc7a11b5fe1c7937c889d42baa3fcee0 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 16 Jul 2021 11:37:08 +0200 Subject: [PATCH 074/983] Add support for `$setWindowFields` aggregation stage. Add a SetWindowFieldsOperation to the aggregation framework. The builder API allows fluent declaration of the aggregation stage as shown in the sample below. SetWindowFieldsOperation.builder() .partitionByField("state") .sortBy(Sort.by(Direction.ASC, "date")) .output(AccumulatorOperators.valueOf("qty").sum()) .within(Windows.documents().fromUnbounded().toCurrent().build()) .as("cumulativeQuantityForState") .build(); Closes #3711 Original pull request: #3739. 
--- .../aggregation/SetWindowFieldsOperation.java | 783 ++++++++++++++++++ .../SetWindowFieldsOperationTests.java | 132 +++ .../SetWindowFieldsOperationUnitTests.java | 111 +++ 3 files changed, 1026 insertions(+) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java new file mode 100644 index 0000000000..0f0909beb2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -0,0 +1,783 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.bson.Document; +import org.springframework.data.domain.Sort; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * @author Christoph Strobl + * @since 3.3 + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/setWindowFields/ + */ +public class SetWindowFieldsOperation + implements AggregationOperation, FieldsExposingAggregationOperation.InheritsFieldsAggregationOperation { + + @Nullable // + private Object partitionBy; + + @Nullable // + private AggregationOperation sortBy; + + private WindowOutput output; + + /** + * Create a new {@link SetWindowFieldsOperation} with given args. + * + * @param partitionBy The field or {@link AggregationExpression} to group by. + * @param sortBy the {@link SortOperation operation} to sort the documents by in the partition. + * @param output the {@link WindowOutput} containing the fields to add and the rules to calculate their respective + * values. + */ + public SetWindowFieldsOperation(@Nullable Object partitionBy, @Nullable AggregationOperation sortBy, + WindowOutput output) { + + this.partitionBy = partitionBy; + this.sortBy = sortBy; + this.output = output; + } + + /** + * Obtain a {@link SetWindowFieldsOperationBuilder builder} to create a {@link SetWindowFieldsOperation}. + * + * @return new instance of {@link SetWindowFieldsOperationBuilder}. 
+ */ + public static SetWindowFieldsOperationBuilder builder() { + return new SetWindowFieldsOperationBuilder(); + } + + @Override + public ExposedFields getFields() { + return ExposedFields.nonSynthetic(Fields.from(output.fields.toArray(new Field[0]))); + } + + @Override + public Document toDocument(AggregationOperationContext context) { + + Document $setWindowFields = new Document(); + if (partitionBy != null) { + if (partitionBy instanceof AggregationExpression) { + $setWindowFields.append("partitionBy", ((AggregationExpression) partitionBy).toDocument(context)); + } else if (partitionBy instanceof Field) { + $setWindowFields.append("partitionBy", context.getReference((Field) partitionBy).toString()); + } else { + $setWindowFields.append("partitionBy", partitionBy); + } + } + + if (sortBy != null) { + $setWindowFields.append("sortBy", sortBy.toDocument(context).get(sortBy.getOperator())); + } + + Document output = new Document(); + for (ComputedField field : this.output.fields) { + + Document fieldOperation = field.getWindowOperator().toDocument(context); + if (field.window != null) { + fieldOperation.put("window", field.window.toDocument(context)); + } + output.append(field.getName(), fieldOperation); + } + $setWindowFields.append("output", output); + + return new Document(getOperator(), $setWindowFields); + } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#getOperator() + */ + @Override + public String getOperator() { + return "$setWindowFields"; + } + + /** + * {@link WindowOutput} defines output of {@literal $setWindowFields} stage by defining the {@link ComputedField + * field(s)} to append to the documents in the output. + */ + public static class WindowOutput { + + private List fields; + + /** + * Create a new output containing the single given {@link ComputedField field}. + * + * @param outputField must not be {@literal null}. 
+		 */
+		public WindowOutput(ComputedField outputField) {
+
+			Assert.notNull(outputField, "OutputField must not be null!");
+			this.fields = new ArrayList<>();
+			this.fields.add(outputField);
+		}
+
+		/**
+		 * Append the given {@link ComputedField field} to the output.
+		 *
+		 * @param field must not be {@literal null}.
+		 * @return this.
+		 */
+		public WindowOutput append(ComputedField field) {
+
+			Assert.notNull(field, "Field must not be null!");
+			fields.add(field);
+			return this;
+		}
+
+		/**
+		 * Append the given {@link AggregationExpression} as a {@link ComputedField field} in a fluent way.
+		 *
+		 * @param expression must not be {@literal null}.
+		 * @return new instance of {@link ComputedFieldAppender}.
+		 * @see #append(ComputedField)
+		 */
+		public ComputedFieldAppender append(AggregationExpression expression) {
+
+			return new ComputedFieldAppender() {
+
+				@Nullable private Window window;
+
+				@Override
+				public WindowOutput as(String fieldname) {
+
+					return WindowOutput.this.append(new ComputedField(fieldname, expression, window));
+				}
+
+				@Override
+				public ComputedFieldAppender within(Window window) {
+					this.window = window;
+					return this;
+				}
+			};
+		}
+
+		/**
+		 * Tiny little helper to allow fluent API usage for {@link #append(ComputedField)}.
+		 */
+		interface ComputedFieldAppender {
+
+			/**
+			 * Specify the target field name.
+			 *
+			 * @param fieldname the name of field to add to the target document.
+			 * @return the {@link WindowOutput} that started the append operation.
+			 */
+			WindowOutput as(String fieldname);
+
+			/**
+			 * Specify the window boundaries.
+			 *
+			 * @param window must not be {@literal null}.
+			 * @return this.
+			 */
+			ComputedFieldAppender within(Window window);
+		}
+	}
+
+	/**
+	 * A {@link Field} that is the result of a computation done via an {@link AggregationExpression}.
+ * + * @author Christoph Strobl + */ + public static class ComputedField implements Field { + + private String name; + private AggregationExpression windowOperator; + + @Nullable // + private Window window; + + /** + * Create a new {@link ComputedField}. + * + * @param name the target field name. + * @param windowOperator the expression to calculate the field value. + */ + public ComputedField(String name, AggregationExpression windowOperator) { + this(name, windowOperator, null); + } + + /** + * Create a new {@link ComputedField}. + * + * @param name the target field name. + * @param windowOperator the expression to calculate the field value. + * @param window the boundaries to operate within. Can be {@literal null}. + */ + public ComputedField(String name, AggregationExpression windowOperator, @Nullable Window window) { + + this.name = name; + this.windowOperator = windowOperator; + this.window = window; + } + + @Override + public String getName() { + return name; + } + + @Override + public String getTarget() { + return getName(); + } + + @Override + public boolean isAliased() { + return false; + } + + public AggregationExpression getWindowOperator() { + return windowOperator; + } + + public Window getWindow() { + return window; + } + } + + /** + * Quick access to {@link DocumentWindow documents} and {@literal RangeWindow range} {@link Window windows}. + * + * @author Christoph Strobl + */ + public interface Windows { + + /** + * Create a document window relative to the position of the current document. + * + * @param lower an integer for a position relative to the current document, {@literal current} or + * {@literal unbounded}. + * @param upper an integer for a position relative to the current document, {@literal current} or + * {@literal unbounded}. + * @return new instance of {@link DocumentWindow}. 
+ */ + static DocumentWindow documents(Object lower, Object upper) { + return new DocumentWindow(lower, upper); + } + + /** + * Create a range window defined based on sort expression. + * + * @param lower a numeric value to add the sort by field value of the current document, {@literal current} or + * {@literal unbounded}. + * @param upper a numeric value to add the sort by field value of the current document, {@literal current} or + * {@literal unbounded}. + * @return new instance of {@link RangeWindow}. + */ + static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) { + return new RangeWindow(lower, upper, unit); + } + + /** + * Create a range window based on the {@link Sort sort value} of the current document via a fluent API. + * + * @return new instance of {@link RangeWindowBuilder}. + */ + static RangeWindowBuilder range() { + return new RangeWindowBuilder(); + } + + /** + * Create a document window relative to the position of the current document via a fluent API. + * + * @return new instance of {@link DocumentWindowBuilder}. + */ + static DocumentWindowBuilder documents() { + return new DocumentWindowBuilder(); + } + } + + /** + * A {@link Window} to be used for {@link ComputedField#getWindow() ComputedField}. + */ + public interface Window { + + /** + * The upper (inclusive) boundary. + * + * @return + */ + Object getUpper(); + + /** + * The lower (inclusive) boundary. + * + * @return + */ + Object getLower(); + + /** + * Obtain the document representation of the window in a default {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } + + /** + * Obtain the document representation of the window in the given {@link AggregationOperationContext context}. + * + * @return never {@literal null}. + */ + Document toDocument(AggregationOperationContext ctx); + } + + /** + * Builder API for a {@link RangeWindow}. 
+ * + * @author Christoph Strobl + */ + public static class RangeWindowBuilder { + + @Nullable // + private Object upper; + + @Nullable // + private Object lower; + + @Nullable // + private WindowUnit unit; + + /** + * The upper (inclusive) range limit based on the sortBy field. + * + * @param upper eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder to(String upper) { + + this.upper = upper; + return this; + } + + /** + * The lower (inclusive) range limit based on the sortBy field. + * + * @param lower eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder from(String lower) { + + this.lower = lower; + return this; + } + + /** + * The upper (inclusive) range limit value to add to the value based on the sortBy field. + * + * @param upper + * @return this. + */ + public RangeWindowBuilder to(Number upper) { + + this.upper = upper; + return this; + } + + /** + * The lower (inclusive) range limit value to add to the value based on the sortBy field. + * + * @param lower + * @return this. + */ + public RangeWindowBuilder from(Number lower) { + + this.lower = lower; + return this; + } + + /** + * Use {@literal current} as {@link #from(String) lower} limit. + * + * @return this. + */ + public RangeWindowBuilder fromCurrent() { + return from("current"); + } + + /** + * Use {@literal unbounded} as {@link #from(String) lower} limit. + * + * @return this. + */ + public RangeWindowBuilder fromUnbounded() { + return from("unbounded"); + } + + /** + * Use {@literal current} as {@link #to(String) upper} limit. + * + * @return this. + */ + public RangeWindowBuilder toCurrent() { + return to("current"); + } + + /** + * Use {@literal unbounded} as {@link #to(String) upper} limit. + * + * @return this. + */ + public RangeWindowBuilder toUnbounded() { + return to("unbounded"); + } + + /** + * Set the {@link WindowUnit unit} or measure for the given {@link Window}. 
+		 *
+		 * @param windowUnit must not be {@literal null}. Can be one of {@link Windows}.
+		 * @return this.
+		 */
+		public RangeWindowBuilder unit(WindowUnit windowUnit) {
+
+			this.unit = windowUnit;
+			return this;
+		}
+
+		/**
+		 * Build the {@link RangeWindow}.
+		 *
+		 * @return new instance of {@link RangeWindow}.
+		 */
+		public RangeWindow build() {
+			return new RangeWindow(lower, upper, unit);
+		}
+	}
+
+	/**
+	 * Builder API for a {@link DocumentWindow}.
+	 *
+	 * @author Christoph Strobl
+	 */
+	public static class DocumentWindowBuilder {
+
+		@Nullable //
+		private Object upper;
+
+		@Nullable //
+		private Object lower;
+
+		public DocumentWindowBuilder from(Number lower) {
+
+			this.lower = lower;
+			return this;
+		}
+
+		public DocumentWindowBuilder fromCurrent() {
+			return from("current");
+		}
+
+		public DocumentWindowBuilder fromUnbounded() {
+			return from("unbounded");
+		}
+
+		public DocumentWindowBuilder to(String upper) {
+
+			this.upper = upper;
+			return this;
+		}
+
+		/**
+		 * The lower (inclusive) range limit based on current document.
+		 *
+		 * @param lower eg. {@literal current} or {@literal unbounded}.
+		 * @return this.
+		 */
+		public DocumentWindowBuilder from(String lower) {
+
+			this.lower = lower;
+			return this;
+		}
+
+		/**
+		 * The upper (inclusive) range limit based on current document.
+		 *
+		 * @param upper eg. {@literal current} or {@literal unbounded}.
+		 * @return this.
+		 */
+		public DocumentWindowBuilder to(Number upper) {
+
+			this.upper = upper;
+			return this;
+		}
+
+		public DocumentWindowBuilder toCurrent() {
+			return to("current");
+		}
+
+		public DocumentWindowBuilder toUnbounded() {
+			return to("unbounded");
+		}
+
+		public DocumentWindow build() {
+			return new DocumentWindow(lower, upper);
+		}
+	}
+
+	/**
+	 * Common base class for {@link Window} implementation.
+ * + * @author Christoph Strobl + */ + abstract static class WindowImp implements Window { + + private final Object upper; + private final Object lower; + + protected WindowImp(Object lower, Object upper) { + this.upper = upper; + this.lower = lower; + } + + @Override + public Object getUpper() { + return upper; + } + + @Override + public Object getLower() { + return lower; + } + } + + /** + * {@link Window} implementation based on the current document. + * + * @author Christoph Strobl + */ + public static class DocumentWindow extends WindowImp { + + DocumentWindow(Object lower, Object upper) { + super(lower, upper); + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + return new Document("documents", Arrays.asList(getLower(), getUpper())); + } + } + + /** + * {@link Window} implementation based on the sort fields. + * + * @author Christoph Strobl + */ + public static class RangeWindow extends WindowImp { + + @Nullable // + private WindowUnit unit; + + protected RangeWindow(Object lower, Object upper, WindowUnit unit) { + + super(lower, upper); + this.unit = unit; + } + + @Override + public Document toDocument(AggregationOperationContext ctx) { + + Document range = new Document("range", new Object[] { getLower(), getUpper() }); + if (unit != null && !WindowUnits.DEFAULT.equals(unit)) { + range.append("unit", unit.name().toLowerCase()); + } + return range; + } + } + + /** + * The actual time unit to apply to a {@link Window}. + */ + public interface WindowUnit { + String name(); + } + + /** + * Quick access to available {@link WindowUnit units}. + */ + public enum WindowUnits implements WindowUnit { + DEFAULT, YEAR, QUARTER, MONTH, WEEK, DAY, HOUR, MINUTE, SECOND, MILLISECOND + } + + /** + * A fluent builder to create a {@link SetWindowFieldsOperation}. 
+ * + * @author Christoph Strobl + */ + public static class SetWindowFieldsOperationBuilder { + + private Object partitionBy; + private SortOperation sortOperation; + private WindowOutput output; + + /** + * Specify the field to group by. + * + * @param fieldName must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder partitionByField(String fieldName) { + return partitionBy(Fields.field("$" + fieldName, fieldName)); + } + + /** + * Specify the {@link AggregationExpression expression} to group by. + * + * @param expression must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder partitionByExpression(AggregationExpression expression) { + return partitionBy(expression); + } + + /** + * Sort {@link Sort.Direction#ASC ascending} by the given fields. + * + * @param fields must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(String... fields) { + return sortBy(Sort.by(fields)); + } + + /** + * Set the sort order. + * + * @param sort must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(Sort sort) { + return sortBy(new SortOperation(sort)); + } + + /** + * Set the {@link SortOperation} to use. + * + * @param sort must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) { + + this.sortOperation = sort; + return this; + } + + /** + * Define the actual output computation. + * + * @param output must not be {@literal null}. + * @return this. + */ + public SetWindowFieldsOperationBuilder output(WindowOutput output) { + + this.output = output; + return this; + } + + /** + * Add a field capturing the result of the given {@link AggregationExpression expression} to the output. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link WindowChoice}. 
+		 */
+		public WindowChoice output(AggregationExpression expression) {
+
+			return new WindowChoice() {
+
+				@Nullable private Window window;
+
+				@Override
+				public As within(Window window) {
+
+					this.window = window;
+					return this;
+				}
+
+				@Override
+				public SetWindowFieldsOperationBuilder as(String targetFieldName) {
+
+					ComputedField computedField = new ComputedField(targetFieldName, expression, window);
+
+					if (SetWindowFieldsOperationBuilder.this.output == null) {
+						SetWindowFieldsOperationBuilder.this.output = new WindowOutput(computedField);
+					} else {
+						SetWindowFieldsOperationBuilder.this.output.append(computedField);
+					}
+
+					return SetWindowFieldsOperationBuilder.this;
+				}
+			};
+		}
+
+		/**
+		 * Interface to capture the field name used to hold the computation result.
+		 */
+		public interface As {
+
+			/**
+			 * Define the target field name to hold the computation result.
+			 *
+			 * @param targetFieldName must not be {@literal null}.
+			 * @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance.
+			 */
+			SetWindowFieldsOperationBuilder as(String targetFieldName);
+		}
+
+		/**
+		 * Interface to capture an optional {@link Window} applicable to the field computation.
+		 */
+		public interface WindowChoice extends As {
+
+			/**
+			 * Specify calculation boundaries.
+			 *
+			 * @param window must not be {@literal null}.
+			 * @return never {@literal null}.
+			 */
+			As within(Window window);
+
+		}
+
+		/**
+		 * Partition by a value that translates to a valid mongodb expression.
+		 *
+		 * @param value must not be {@literal null}.
+		 * @return this.
+		 */
+		public SetWindowFieldsOperationBuilder partitionBy(Object value) {
+
+			partitionBy = value;
+			return this;
+		}
+
+		/**
+		 * Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments.
+		 *
+		 * @return new instance of {@link SetWindowFieldsOperation}.
+ */ + public SetWindowFieldsOperation build() { + return new SetWindowFieldsOperation(partitionBy, sortOperation, output); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java new file mode 100644 index 0000000000..b88e0479a3 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java @@ -0,0 +1,132 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Date; +import java.util.List; + +import org.bson.Document; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.springframework.data.annotation.Id; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; +import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestTemplate; +import org.springframework.data.mongodb.test.util.Template; + +/** + * @author Christoph Strobl + */ +@ExtendWith(MongoTemplateExtension.class) +@EnableIfMongoServerVersion(isGreaterThanEqual = "5.0") +class SetWindowFieldsOperationTests { + + @Template // + private static MongoTestTemplate mongoTemplate; + + @AfterEach + void afterEach() { + mongoTemplate.flush(CakeSale.class); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, 
setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(162, 282, 427, 134, + 238, 378); + } + + @Test // GH-3711 + void executesSetWindowFieldsOperationWithPartitionExpressionCorrectly() { + + initCakeSales(); + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByExpression(Year.yearOf("date")) // resolves to $year: "$orderDate" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + AggregationResults results = mongoTemplate.aggregateAndReturn(Document.class) + .by(Aggregation.newAggregation(CakeSale.class, setWindowFieldsOperation)).all(); + + assertThat(results.getMappedResults()).map(it -> it.get("cumulativeQuantityForState")).contains(134, 296, 104, 224, + 145, 285); + } + + void initCakeSales() { + + mongoTemplate.execute(CakeSale.class, collection -> { + + List source = Arrays.asList(Document.parse( + "{ _id: 0, type: \"chocolate\", orderDate: { $date : \"2020-05-18T14:10:30Z\" }, state: \"CA\", price: 13, quantity: 120 }"), + Document.parse( + "{ _id: 1, type: \"chocolate\", orderDate: { $date : \"2021-03-20T11:30:05Z\"}, state: \"WA\", price: 14, quantity: 140 }"), + Document.parse( + "{ _id: 2, type: \"vanilla\", orderDate: { $date : \"2021-01-11T06:31:15Z\"}, state: \"CA\", price: 12, quantity: 145 }"), + Document.parse( + "{ _id: 3, type: \"vanilla\", orderDate: { $date : \"2020-02-08T13:13:23Z\"}, state: \"WA\", price: 13, quantity: 104 }"), + Document.parse( + "{ _id: 4, type: \"strawberry\", orderDate: { $date : \"2019-05-18T16:09:01Z\"}, state: \"CA\", price: 41, quantity: 162 }"), + Document.parse( + "{ _id: 5, type: \"strawberry\", orderDate: { $date : \"2019-01-08T06:12:03Z\"}, state: \"WA\", price: 
43, quantity: 134 }")); + + collection.insertMany(source); + return "OK"; + }); + } + + @lombok.Data + static class CakeSale { + + @Id Integer id; + + String state; + + @Field("orderDate") // + Date date; + + @Field("quantity") // + Integer qty; + + String type; + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java new file mode 100644 index 0000000000..87e3f8f54c --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java @@ -0,0 +1,111 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.Windows; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +public class SetWindowFieldsOperationUnitTests { + + @Test // GH-3711 + void rendersTargetFieldNamesCorrectly() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .partitionByField("state") // resolves to field ref "$state" + .sortBy(Sort.by(Direction.ASC, "date")) // resolves to "orderDate" + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("cumulativeQuantityForState") // + .build(); // + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { partitionBy: \"$state\", sortBy: { orderDate: 1 }, output: { cumulativeQuantityForState: { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } } } } }")); + } + + @Test // GH-3711 + void exposesTargetFieldNames() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + 
.output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); // + + assertThat(setWindowFieldsOperation.getFields()).map(ExposedField::getName).containsExactly("f1", "f2"); + } + + @Test // GH-3711 + void rendersMuiltipleOutputFields() { + + SetWindowFieldsOperation setWindowFieldsOperation = SetWindowFieldsOperation.builder() // + .output(AccumulatorOperators.valueOf("qty").sum()) // resolves to "$quantity" + .within(Windows.documents().fromUnbounded().toCurrent().build()) // + .as("f1") // + .output(AccumulatorOperators.valueOf("qty").avg()) // resolves to "$quantity" + .within(Windows.documents().from(-1).to(0).build()) // + .as("f2") // + .build(); // + + Document document = setWindowFieldsOperation.toDocument(contextFor(CakeSale.class)); + assertThat(document).isEqualTo(Document.parse( + "{ $setWindowFields: { output: { f1 : { $sum: \"$quantity\", window: { documents: [ \"unbounded\", \"current\" ] } }, f2 : { $avg: \"$quantity\", window: { documents: [ -1, 0 ] } } } } }")); + } + + private static AggregationOperationContext contextFor(@Nullable Class type) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)); + } + + static class CakeSale { + + String state; + + @Field("orderDate") Date date; + + @Field("quantity") Integer qty; + + } +} From f9f4c4621be4a8bd03a542caf82b48a528d913db Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 23 Aug 2021 11:16:21 +0200 Subject: [PATCH 075/983] 
Polishing. Update javadoc and add assertions. See #3711 Original pull request: #3739. --- .../aggregation/SetWindowFieldsOperation.java | 176 ++++++++++-------- .../SetWindowFieldsOperationTests.java | 2 + .../SetWindowFieldsOperationUnitTests.java | 4 +- 3 files changed, 107 insertions(+), 75 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java index 0f0909beb2..9c40a0b642 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -25,6 +25,8 @@ import org.springframework.util.Assert; /** + * Encapsulates the {@code setWindowFields}-operation. + * * @author Christoph Strobl * @since 3.3 * @see fields; + private final List fields; /** * Create a new output containing the single given {@link ComputedField field}. @@ -128,6 +129,7 @@ public static class WindowOutput { public WindowOutput(ComputedField outputField) { Assert.notNull(outputField, "OutputField must not be null!"); + this.fields = new ArrayList<>(); this.fields.add(outputField); } @@ -141,6 +143,7 @@ public WindowOutput(ComputedField outputField) { public WindowOutput append(ComputedField field) { Assert.notNull(field, "Field must not be null!"); + fields.add(field); return this; } @@ -202,11 +205,9 @@ interface ComputedFieldAppender { */ public static class ComputedField implements Field { - private String name; - private AggregationExpression windowOperator; - - @Nullable // - private Window window; + private final String name; + private final AggregationExpression windowOperator; + private final @Nullable Window window; /** * Create a new {@link ComputedField}. 
@@ -286,7 +287,7 @@ static DocumentWindow documents(Object lower, Object upper) { * @return new instance of {@link RangeWindow}. */ static RangeWindow range(Object lower, Object upper, @Nullable WindowUnit unit) { - return new RangeWindow(lower, upper, unit); + return new RangeWindow(lower, upper, unit == null ? WindowUnits.DEFAULT : unit); } /** @@ -314,18 +315,18 @@ static DocumentWindowBuilder documents() { public interface Window { /** - * The upper (inclusive) boundary. + * The lower (inclusive) boundary. * * @return */ - Object getUpper(); + Object getLower(); /** - * The lower (inclusive) boundary. - * + * The upper (inclusive) boundary. + * * @return */ - Object getLower(); + Object getUpper(); /** * Obtain the document representation of the window in a default {@link AggregationOperationContext context}. @@ -351,14 +352,21 @@ default Document toDocument() { */ public static class RangeWindowBuilder { - @Nullable // - private Object upper; + private @Nullable Object lower; + private @Nullable Object upper; + private @Nullable WindowUnit unit; - @Nullable // - private Object lower; + /** + * The lower (inclusive) range limit based on the sortBy field. + * + * @param lower eg. {@literal current} or {@literal unbounded}. + * @return this. + */ + public RangeWindowBuilder from(String lower) { - @Nullable // - private WindowUnit unit; + this.lower = lower; + return this; + } /** * The upper (inclusive) range limit based on the sortBy field. @@ -373,19 +381,23 @@ public RangeWindowBuilder to(String upper) { } /** - * The lower (inclusive) range limit based on the sortBy field. + * The lower (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for + * a position before the current document. Use a positive integer for a position after the current document. + * {@code 0} is the current document position. * - * @param lower eg. {@literal current} or {@literal unbounded}. + * @param lower * @return this. 
*/ - public RangeWindowBuilder from(String lower) { + public RangeWindowBuilder from(Number lower) { this.lower = lower; return this; } /** - * The upper (inclusive) range limit value to add to the value based on the sortBy field. + * The upper (inclusive) range limit value to add to the value based on the sortBy field. Use a negative integer for + * a position before the current document. Use a positive integer for a position after the current document. + * {@code 0} is the current document position. * * @param upper * @return this. @@ -396,25 +408,13 @@ public RangeWindowBuilder to(Number upper) { return this; } - /** - * The lower (inclusive) range limit value to add to the value based on the sortBy field. - * - * @param lower - * @return this. - */ - public RangeWindowBuilder from(Number lower) { - - this.lower = lower; - return this; - } - /** * Use {@literal current} as {@link #from(String) lower} limit. * * @return this. */ public RangeWindowBuilder fromCurrent() { - return from("current"); + return from(CURRENT); } /** @@ -423,7 +423,7 @@ public RangeWindowBuilder fromCurrent() { * @return this. */ public RangeWindowBuilder fromUnbounded() { - return from("unbounded"); + return from(UNBOUNDED); } /** @@ -432,7 +432,7 @@ public RangeWindowBuilder fromUnbounded() { * @return this. */ public RangeWindowBuilder toCurrent() { - return to("current"); + return to(CURRENT); } /** @@ -441,7 +441,7 @@ public RangeWindowBuilder toCurrent() { * @return this. */ public RangeWindowBuilder toUnbounded() { - return to("unbounded"); + return to(UNBOUNDED); } /** @@ -452,7 +452,8 @@ public RangeWindowBuilder toUnbounded() { */ public RangeWindowBuilder unit(WindowUnit windowUnit) { - this.unit = unit; + Assert.notNull(windowUnit, "WindowUnit must not be null"); + this.unit = windowUnit; return this; } @@ -462,6 +463,11 @@ public RangeWindowBuilder unit(WindowUnit windowUnit) { * @return new instance of {@link RangeWindow}. 
*/ public RangeWindow build() { + + Assert.notNull(lower, "Lower bound must not be null"); + Assert.notNull(upper, "Upper bound must not be null"); + Assert.notNull(unit, "WindowUnit bound must not be null"); + return new RangeWindow(lower, upper, unit); } } @@ -473,12 +479,17 @@ public RangeWindow build() { */ public static class DocumentWindowBuilder { - @Nullable // - private Object upper; - - @Nullable // - private Object lower; + private @Nullable Object lower; + private @Nullable Object upper; + /** + * The lower (inclusive) range limit based on current document. Use a negative integer for a position before the + * current document. Use a positive integer for a position after the current document. {@code 0} is the current + * document position. + * + * @param lower + * @return this. + */ public DocumentWindowBuilder from(Number lower) { this.lower = lower; @@ -486,11 +497,11 @@ public DocumentWindowBuilder from(Number lower) { } public DocumentWindowBuilder fromCurrent() { - return from("current"); + return from(CURRENT); } public DocumentWindowBuilder fromUnbounded() { - return from("unbounded"); + return from(UNBOUNDED); } public DocumentWindowBuilder to(String upper) { @@ -512,9 +523,11 @@ public DocumentWindowBuilder from(String lower) { } /** - * The upper (inclusive) range limit based on current document. + * The upper (inclusive) range limit based on current document. Use a negative integer for a position before the + * current document. Use a positive integer for a position after the current document. {@code 0} is the current + * document position. * - * @param upper eg. {@literal current} or {@literal unbounded}. + * @param upper * @return this. 
*/ public DocumentWindowBuilder to(Number upper) { @@ -524,14 +537,18 @@ public DocumentWindowBuilder to(Number upper) { } public DocumentWindowBuilder toCurrent() { - return to("current"); + return to(CURRENT); } public DocumentWindowBuilder toUnbounded() { - return to("unbounded"); + return to(UNBOUNDED); } public DocumentWindow build() { + + Assert.notNull(lower, "Lower bound must not be null"); + Assert.notNull(upper, "Upper bound must not be null"); + return new DocumentWindow(lower, upper); } } @@ -541,24 +558,24 @@ public DocumentWindow build() { * * @author Christoph Strobl */ - abstract static class WindowImp implements Window { + static abstract class WindowImpl implements Window { - private final Object upper; private final Object lower; + private final Object upper; - protected WindowImp(Object lower, Object upper) { - this.upper = upper; + protected WindowImpl(Object lower, Object upper) { this.lower = lower; + this.upper = upper; } @Override - public Object getUpper() { - return upper; + public Object getLower() { + return lower; } @Override - public Object getLower() { - return lower; + public Object getUpper() { + return upper; } } @@ -567,7 +584,7 @@ public Object getLower() { * * @author Christoph Strobl */ - public static class DocumentWindow extends WindowImp { + public static class DocumentWindow extends WindowImpl { DocumentWindow(Object lower, Object upper) { super(lower, upper); @@ -584,10 +601,9 @@ public Document toDocument(AggregationOperationContext ctx) { * * @author Christoph Strobl */ - public static class RangeWindow extends WindowImp { + public static class RangeWindow extends WindowImpl { - @Nullable // - private WindowUnit unit; + private final WindowUnit unit; protected RangeWindow(Object lower, Object upper, WindowUnit unit) { @@ -634,10 +650,12 @@ public static class SetWindowFieldsOperationBuilder { /** * Specify the field to group by. * - * @param fieldName must not be {@literal null}. 
+ * @param fieldName must not be {@literal null} or null. * @return this. */ public SetWindowFieldsOperationBuilder partitionByField(String fieldName) { + + Assert.hasText(fieldName, "Field name must not be empty or null"); return partitionBy(Fields.field("$" + fieldName, fieldName)); } @@ -679,6 +697,8 @@ public SetWindowFieldsOperationBuilder sortBy(Sort sort) { */ public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) { + Assert.notNull(sort, "SortOperation must not be null"); + this.sortOperation = sort; return this; } @@ -691,6 +711,8 @@ public SetWindowFieldsOperationBuilder sortBy(SortOperation sort) { */ public SetWindowFieldsOperationBuilder output(WindowOutput output) { + Assert.notNull(output, "WindowOutput must not be null"); + this.output = output; return this; } @@ -710,6 +732,8 @@ public WindowChoice output(AggregationExpression expression) { @Override public As within(Window window) { + Assert.notNull(window, "Window must not be null"); + this.window = window; return this; } @@ -717,6 +741,8 @@ public As within(Window window) { @Override public SetWindowFieldsOperationBuilder as(String targetFieldName) { + Assert.hasText(targetFieldName, "Target field name must not be empty or null"); + ComputedField computedField = new ComputedField(targetFieldName, expression, window); if (SetWindowFieldsOperationBuilder.this.output == null) { @@ -738,7 +764,7 @@ public interface As { /** * Define the target name field name to hold the computation result. * - * @param targetFieldName must not be {@literal null}. + * @param targetFieldName must not be {@literal null} or empty. * @return the starting point {@link SetWindowFieldsOperationBuilder builder} instance. */ SetWindowFieldsOperationBuilder as(String targetFieldName); @@ -760,20 +786,22 @@ public interface WindowChoice extends As { } /** - * Partition by a value that transaltes to a valid mongodb expression. + * Partition by a value that translates to a valid mongodb expression. 
* * @param value must not be {@literal null}. * @return this. */ public SetWindowFieldsOperationBuilder partitionBy(Object value) { + Assert.notNull(value, "Partition By must not be null"); + partitionBy = value; return this; } /** * Obtain a new instance of {@link SetWindowFieldsOperation} with previously set arguments. - * + * * @return new instance of {@link SetWindowFieldsOperation}. */ public SetWindowFieldsOperation build() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java index b88e0479a3..17bfb9b5a3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationTests.java @@ -37,6 +37,8 @@ import org.springframework.data.mongodb.test.util.Template; /** + * Integration tests for {@link SetWindowFieldsOperation}. + * * @author Christoph Strobl */ @ExtendWith(MongoTemplateExtension.class) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java index 87e3f8f54c..62b0f4dffc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperationUnitTests.java @@ -33,9 +33,11 @@ import org.springframework.lang.Nullable; /** + * Unit tests for {@link SetWindowFieldsOperation}. 
+ * * @author Christoph Strobl */ -public class SetWindowFieldsOperationUnitTests { +class SetWindowFieldsOperationUnitTests { @Test // GH-3711 void rendersTargetFieldNamesCorrectly() { From c574e5cf8a5b553399a0faf65d1ba0e1574470aa Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 11:44:15 +0200 Subject: [PATCH 076/983] Add support for `$covariancePop` and `$covarianceSamp` aggregation expressions. Closes: #3712 Original pull request: #3740. --- .../aggregation/AccumulatorOperators.java | 177 ++++++++++++++++++ .../core/aggregation/ArithmeticOperators.java | 59 ++++++ .../core/spel/MethodReferenceNode.java | 2 + .../aggregation/TestAggregationContext.java | 75 ++++++++ .../AccumulatorOperatorsUnitTests.java | 77 ++++++++ .../SpelExpressionTransformerUnitTests.java | 10 + 6 files changed, 400 insertions(+) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 6698b932f8..1ea1af9731 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -142,6 +142,63 @@ public StdDevSamp stdDevSamp() { return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the population covariance of the two. 
+ * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? 
CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -658,4 +715,124 @@ public Document toDocument(Object value, AggregationOperationContext context) { return super.toDocument(value, context); } } + + /** + * {@link AggregationExpression} for {@code $covariancePop}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovariancePop extends AbstractAggregationExpression { + + private CovariancePop(Object value) { + super(value); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new CovariancePop(asFields(fieldReference)); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(AggregationExpression expression) { + return new CovariancePop(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public CovariancePop and(String fieldReference) { + return new CovariancePop(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. 
+ */ + public CovariancePop and(AggregationExpression expression) { + return new CovariancePop(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covariancePop"; + } + } + + /** + * {@link AggregationExpression} for {@code $covarianceSamp}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovarianceSamp extends AbstractAggregationExpression { + + private CovarianceSamp(Object value) { + super(value); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new CovarianceSamp(asFields(fieldReference)); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(AggregationExpression expression) { + return new CovarianceSamp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public CovarianceSamp and(String fieldReference) { + return new CovarianceSamp(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. 
+ */ + public CovarianceSamp and(AggregationExpression expression) { + return new CovarianceSamp(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covarianceSamp"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 6053f3ae1b..b27e54d298 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -19,6 +19,8 @@ import java.util.List; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Max; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Min; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; @@ -511,6 +513,63 @@ public StdDevSamp stdDevSamp() { : AccumulatorOperators.StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. 
+ * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given + * field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given + * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + /** * Creates new {@link AggregationExpression} that rounds a number to a whole integer or to a specified decimal * place. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 5a2c48bc20..c858926446 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -170,6 +170,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("addToSet", singleArgRef().forOperator("$addToSet")); map.put("stdDevPop", arrayArgRef().forOperator("$stdDevPop")); map.put("stdDevSamp", arrayArgRef().forOperator("$stdDevSamp")); + map.put("covariancePop", arrayArgRef().forOperator("$covariancePop")); + map.put("covarianceSamp", arrayArgRef().forOperator("$covarianceSamp")); // TYPE OPERATORS map.put("type", singleArgRef().forOperator("$type")); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java new file mode 100644 index 0000000000..4f16072e43 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/aggregation/TestAggregationContext.java @@ -0,0 +1,75 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.data.mongodb.core.aggregation.Field; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.lang.Nullable; + +/** + * @author Christoph Strobl + */ +public class TestAggregationContext implements AggregationOperationContext { + + private final AggregationOperationContext delegate; + + private TestAggregationContext(AggregationOperationContext delegate) { + this.delegate = delegate; + } + + public static AggregationOperationContext contextFor(@Nullable Class type) { + + MappingMongoConverter mongoConverter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, + new MongoMappingContext()); + mongoConverter.afterPropertiesSet(); + + return contextFor(type, mongoConverter); + } + + public static AggregationOperationContext contextFor(@Nullable Class type, MongoConverter mongoConverter) { + + if (type == null) { + return Aggregation.DEFAULT_CONTEXT; + } + + return new TestAggregationContext(new TypeBasedAggregationOperationContext(type, mongoConverter.getMappingContext(), + new QueryMapper(mongoConverter)).continueOnMissingFieldReference()); + } + + @Override + public Document 
getMappedObject(Document document, @Nullable Class type) { + return delegate.getMappedObject(document, type); + } + + @Override + public FieldReference getReference(Field field) { + return delegate.getReference(field); + } + + @Override + public FieldReference getReference(String name) { + return delegate.getReference(name); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java new file mode 100644 index 0000000000..977183c448 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -0,0 +1,77 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import java.util.Arrays; +import java.util.Date; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Year; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; + +/** + * @author Christoph Strobl + */ +class AccumulatorOperatorsUnitTests { + + @Test // GH-3712 + void rendersCovariancePopWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovariancePopWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covariancePop("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithFieldReference() { + + assertThat(AccumulatorOperators.valueOf("balance").covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList("$balance", "$force"))); + } + + @Test // GH-3712 + void rendersCovarianceSampWithExpression() { + + assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covarianceSamp("midichlorianCount") + .toDocument(TestAggregationContext.contextFor(Jedi.class))) + .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + static class Jedi { + + String name; + + Date birthdate; + + @Field("force") + Integer midichlorianCount; + + Integer balance; + } +} diff 
--git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index b67beed126..c4b945ab94 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -946,6 +946,16 @@ public void shouldRenderRoundWithPlace() { assertThat(transform("round(field, 2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); } + @Test // GH-3712 + void shouldRenderCovariancePop() { + assertThat(transform("covariancePop(field1, field2)")).isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); + } + + @Test // GH-3712 + void shouldRenderCovarianceSamp() { + assertThat(transform("covarianceSamp(field1, field2)")).isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From dbfd4e5c624a9770a2ee6d3235b1e22a604583c7 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 11:44:15 +0200 Subject: [PATCH 077/983] Polishing. Reformat code. See #3712 Original pull request: #3740. 
--- .../core/aggregation/AccumulatorOperators.java | 16 ++++++++-------- .../AccumulatorOperatorsUnitTests.java | 2 ++ src/main/asciidoc/reference/mongodb.adoc | 8 ++++---- 3 files changed, 14 insertions(+), 12 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 1ea1af9731..13913caacf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -143,8 +143,8 @@ public StdDevSamp stdDevSamp() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the population covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -155,8 +155,8 @@ public CovariancePop covariancePop(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the population covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. * * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. 
@@ -171,8 +171,8 @@ private CovariancePop covariancePop() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -183,8 +183,8 @@ public CovarianceSamp covarianceSamp(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. * * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java index 977183c448..6948255d15 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -27,6 +27,8 @@ import org.springframework.data.mongodb.util.aggregation.TestAggregationContext; /** + * Unit tests for {@link AccumulatorOperators}. 
+ * * @author Christoph Strobl */ class AccumulatorOperatorsUnitTests { diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index 84afc7ea09..3bfa500731 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1002,7 +1002,7 @@ assertThat(upserted.getFirstName()).isEqualTo("Mary"); assertThat(upserted.getAge()).isOne(); ---- -[[mongo-template.aggregation-update]] +[[mongo-template.aggregation-update]] === Aggregation Pipeline Updates Update methods exposed by `MongoOperations` and `ReactiveMongoOperations` also accept an <> via `AggregationUpdate`. @@ -2502,8 +2502,8 @@ At the time of this writing, we provide support for the following Aggregation Op | Set Aggregation Operators | `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` -| Group Aggregation Operators -| `addToSet`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` +| Group/Accumulator Aggregation Operators +| `addToSet`, `covariancePop`, `covarianceSamp`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators | `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` @@ -2544,7 +2544,7 @@ At the time of this writing, we provide support for the following Aggregation Op * The operation is mapped or added by Spring Data MongoDB. -Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. +Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. 
[[mongo.aggregation.projection]] === Projection Expressions From f3e067f59f9147b4abc314b021c8b7769c2bc127 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 09:10:35 +0200 Subject: [PATCH 078/983] Add support for `$expMovingAvg` aggregation operator. The SpEL support for this one is missing due to the differing argument map (N, alpha). Closes: #3718 Original pull request: #3744. --- .../aggregation/AccumulatorOperators.java | 111 ++++++++++++++++++ .../AccumulatorOperatorsUnitTests.java | 24 +++- src/main/asciidoc/reference/mongodb.adoc | 2 +- 3 files changed, 131 insertions(+), 6 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 13913caacf..ba2c34edab 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -199,11 +199,61 @@ private CovarianceSamp covarianceSamp() { : CovarianceSamp.covarianceSampOf(expression); } + /** + * Creates new {@link ExpMovingAvgBuilder} that to build {@link AggregationExpression expMovingAvg} that calculates + * the exponential moving average of numeric values + * + * @return new instance of {@link ExpMovingAvg}. + * @since 3.3 + */ + public ExpMovingAvgBuilder expMovingAvg() { + + ExpMovingAvg expMovingAvg = usesFieldRef() ? 
ExpMovingAvg.expMovingAvgOf(fieldReference) + : ExpMovingAvg.expMovingAvgOf(expression); + return new ExpMovingAvgBuilder() { + + @Override + public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) { + return expMovingAvg.n(numberOfHistoricalDocuments); + } + + @Override + public ExpMovingAvg alpha(double exponentialDecayValue) { + return expMovingAvg.alpha(exponentialDecayValue); + } + }; + } + private boolean usesFieldRef() { return fieldReference != null; } } + /** + * Builder for {@link ExpMovingAvg}. + * + * @since 3.3 + */ + public interface ExpMovingAvgBuilder { + + /** + * Define the number of historical documents with significant mathematical weight. + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments); + + /** + * Define the exponential decay value. + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg alpha(double exponentialDecayValue); + + } + /** * {@link AggregationExpression} for {@code $sum}. * @@ -835,4 +885,65 @@ protected String getMongoMethod() { return "$covarianceSamp"; } } + + /** + * {@link ExpMovingAvg} calculates the exponential moving average of numeric values. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class ExpMovingAvg extends AbstractAggregationExpression { + + private ExpMovingAvg(Object value) { + super(value); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. 
+ */ + public static ExpMovingAvg expMovingAvgOf(String fieldReference) { + return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value + * to be used as input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) { + return new ExpMovingAvg(Collections.singletonMap("input", expression)); + } + + /** + * Define the number of historical documents with significant mathematical weight.
        + * Specify either {@link #n(int) N} or {@link #alpha(double) alpha}. Not both! + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) { + return new ExpMovingAvg(append("N", numberOfHistoricalDocuments)); + } + + /** + * Define the exponential decay value.
        + * Specify either {@link #alpha(double) alpha} or {@link #n(int) N}. Not both! + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + public ExpMovingAvg alpha(double exponentialDecayValue) { + return new ExpMovingAvg(append("alpha", exponentialDecayValue)); + } + + @Override + protected String getMongoMethod() { + return "$expMovingAvg"; + } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java index 6948255d15..27bd876255 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperatorsUnitTests.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.*; import java.util.Arrays; import java.util.Date; @@ -46,7 +47,7 @@ void rendersCovariancePopWithExpression() { assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covariancePop("midichlorianCount") .toDocument(TestAggregationContext.contextFor(Jedi.class))) - .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + .isEqualTo(new Document("$covariancePop", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); } @Test // GH-3712 @@ -54,7 +55,7 @@ void rendersCovarianceSampWithFieldReference() { assertThat(AccumulatorOperators.valueOf("balance").covarianceSamp("midichlorianCount") .toDocument(TestAggregationContext.contextFor(Jedi.class))) - .isEqualTo(new 
Document("$covarianceSamp", Arrays.asList("$balance", "$force"))); } @Test // GH-3712 @@ -62,7 +63,21 @@ void rendersCovarianceSampWithExpression() { assertThat(AccumulatorOperators.valueOf(Year.yearOf("birthdate")).covarianceSamp("midichlorianCount") .toDocument(TestAggregationContext.contextFor(Jedi.class))) - .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + .isEqualTo(new Document("$covarianceSamp", Arrays.asList(new Document("$year", "$birthdate"), "$force"))); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithNumberOfHistoricDocuments() { + + assertThat(valueOf("price").expMovingAvg().historicalDocuments(2).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", N: 2 } }")); + } + + @Test // GH-3718 + void rendersExpMovingAvgWithAlpha() { + + assertThat(valueOf("price").expMovingAvg().alpha(0.75).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $expMovingAvg: { input: \"$price\", alpha: 0.75 } }")); } static class Jedi { @@ -71,8 +86,7 @@ static class Jedi { Date birthdate; - @Field("force") - Integer midichlorianCount; + @Field("force") Integer midichlorianCount; Integer balance; } diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index 3bfa500731..dfa87dd3c7 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -2503,7 +2503,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` | Group/Accumulator Aggregation Operators -| `addToSet`, `covariancePop`, `covarianceSamp`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` +| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, 
`(*count)`, `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators | `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` From a977b8a790c0d71ad65e8a9bdab3ad6ca2005257 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 23 Aug 2021 13:26:57 +0200 Subject: [PATCH 079/983] Change visibility of Reactive/MongoRepositoryFactoryBean setters. Setters of the FactoryBean should be public. Closes: #3779 Original pull request: #3780. --- .../mongodb/repository/support/MongoRepositoryFactoryBean.java | 2 +- .../repository/support/ReactiveMongoRepositoryFactoryBean.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java index ade85d3110..8f156bdeea 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/MongoRepositoryFactoryBean.java @@ -70,7 +70,7 @@ public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) */ @Override - protected void setMappingContext(MappingContext mappingContext) { + public void setMappingContext(MappingContext mappingContext) { super.setMappingContext(mappingContext); this.mappingContextConfigured = true; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java index 4e8232714f..6536983a70 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/repository/support/ReactiveMongoRepositoryFactoryBean.java @@ -80,7 +80,7 @@ public void setCreateIndexesForQueryMethods(boolean createIndexesForQueryMethods * @see org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport#setMappingContext(org.springframework.data.mapping.context.MappingContext) */ @Override - protected void setMappingContext(MappingContext mappingContext) { + public void setMappingContext(MappingContext mappingContext) { super.setMappingContext(mappingContext); this.mappingContextConfigured = true; From 30da62181f92ee1e6983eccc4d017941e97ebbb8 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 13:39:05 +0200 Subject: [PATCH 080/983] Add support for `$rank` and `$denseRank` aggregation operators. Closes: #3715 Original pull request: #3741. 
--- .../core/aggregation/DocumentOperators.java | 76 +++++++++++++++++++ .../SpelExpressionTransformer.java | 5 +- .../core/spel/MethodReferenceNode.java | 16 +++- .../DocumentOperatorsUnitTests.java | 39 ++++++++++ .../SpelExpressionTransformerUnitTests.java | 10 +++ 5 files changed, 144 insertions(+), 2 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java new file mode 100644 index 0000000000..8ba7acc5b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -0,0 +1,76 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; + +/** + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentOperators { + + /** + * Obtain the document position (including gaps) relative to others (rank). + * + * @return new instance of {@link Rank}. 
+ * @since 3.3 + */ + public static Rank rank() { + return new Rank(); + } + + /** + * Obtain the document position (without gaps) relative to others (rank). + * + * @return new instance of {@link DenseRank}. + * @since 3.3 + */ + public static DenseRank denseRank() { + return new DenseRank(); + } + + /** + * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents + * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Rank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rank", new Document()); + } + } + + /** + * {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple + * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next rank without + * any gaps. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DenseRank implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$denseRank", new Document()); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java index 33b5c72c78..e00740945b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java @@ -500,7 +500,10 @@ protected Object convert(AggregationExpressionTransformationContext argList = new ArrayList(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index c858926446..763ae830e5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -68,6 +68,10 @@ public class MethodReferenceNode extends ExpressionNode { map.put("lte", arrayArgRef().forOperator("$lte")); map.put("ne", arrayArgRef().forOperator("$ne")); + // DOCUMENT OPERATORS + map.put("rank", emptyRef().forOperator("$rank")); + map.put("denseRank", emptyRef().forOperator("$denseRank")); + // ARITHMETIC OPERATORS map.put("abs", singleArgRef().forOperator("$abs")); map.put("add", arrayArgRef().forOperator("$add")); @@ -307,6 +311,16 @@ static AggregationMethodReference mapArgRef() { return new AggregationMethodReference(null, ArgumentType.MAP, null); } + /** + * Create a new {@link 
AggregationMethodReference} for a {@link ArgumentType#EMPTY_DOCUMENT} argument. + * + * @return never {@literal null}. + * @since 3.3 + */ + static AggregationMethodReference emptyRef() { + return new AggregationMethodReference(null, ArgumentType.EMPTY_DOCUMENT, null); + } + /** * Create a new {@link AggregationMethodReference} for a given {@literal aggregationExpressionOperator} reusing * previously set arguments. @@ -342,7 +356,7 @@ AggregationMethodReference mappingParametersTo(String... aggregationExpressionPr * @since 1.10 */ public enum ArgumentType { - SINGLE, ARRAY, MAP + SINGLE, ARRAY, MAP, EMPTY_DOCUMENT } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java new file mode 100644 index 0000000000..27ac1beccd --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -0,0 +1,39 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.DocumentOperators.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * @author Christoph Strobl + */ +class DocumentOperatorsUnitTests { + + @Test // GH-3715 + void rendersRank() { + assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rank", new Document())); + } + + @Test // GH-3715 + void rendersDenseRank() { + assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$denseRank", new Document())); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index c4b945ab94..06659820d8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -956,6 +956,16 @@ void shouldRenderCovarianceSamp() { assertThat(transform("covarianceSamp(field1, field2)")).isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); } + @Test // GH-3715 + void shouldRenderRank() { + assertThat(transform("rank()")).isEqualTo(Document.parse("{ $rank : {} }")); + } + + @Test // GH-3715 + void shouldRenderDenseRank() { + assertThat(transform("denseRank()")).isEqualTo(Document.parse("{ $denseRank : {} }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? 
result.toString() : result); From 1a86761e2e2002ad4f0aee50de6ebabaa931c84c Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 14:43:48 +0200 Subject: [PATCH 081/983] Add support for `$documentNumber` aggregation operator. Closes: #3717 Original pull request: #3741. --- .../core/aggregation/DocumentOperators.java | 26 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../DocumentOperatorsUnitTests.java | 5 ++++ .../SpelExpressionTransformerUnitTests.java | 5 ++++ 4 files changed, 37 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java index 8ba7acc5b8..7a5918e047 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -18,6 +18,8 @@ import org.bson.Document; /** + * Gateway to {@literal document expressions} such as {@literal $rank, $documentNumber, etc.} + * * @author Christoph Strobl * @since 3.3 */ @@ -43,6 +45,16 @@ public static DenseRank denseRank() { return new DenseRank(); } + /** + * Obtain the current document position. + * + * @return new instance of {@link DocumentNumber}. + * @since 3.3 + */ + public static DocumentNumber documentNumber() { + return new DocumentNumber(); + } + /** * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. @@ -73,4 +85,18 @@ public Document toDocument(AggregationOperationContext context) { return new Document("$denseRank", new Document()); } } + + /** + * {@link DocumentNumber} resolves the current document position. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DocumentNumber implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$documentNumber", new Document()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 763ae830e5..a184c49be8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -71,6 +71,7 @@ public class MethodReferenceNode extends ExpressionNode { // DOCUMENT OPERATORS map.put("rank", emptyRef().forOperator("$rank")); map.put("denseRank", emptyRef().forOperator("$denseRank")); + map.put("documentNumber", emptyRef().forOperator("$documentNumber")); // ARITHMETIC OPERATORS map.put("abs", singleArgRef().forOperator("$abs")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java index 27ac1beccd..c6604bc543 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -36,4 +36,9 @@ void rendersDenseRank() { assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(new Document("$denseRank", new Document())); } + + @Test // GH-3717 + void rendersDocumentNumber() { + assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$documentNumber", new Document())); + } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 06659820d8..3c20b58d58 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -966,6 +966,11 @@ void shouldRenderDenseRank() { assertThat(transform("denseRank()")).isEqualTo(Document.parse("{ $denseRank : {} }")); } + @Test // GH-3717 + void shouldRenderDocumentNumber() { + assertThat(transform("documentNumber()")).isEqualTo(Document.parse("{ $documentNumber : {} }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 510028a834ff585e8805774a717bf94c56d009d8 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 11:33:07 +0200 Subject: [PATCH 082/983] Add support for `$shift` aggregation Operator. Closes: #3727 Original pull request: #3741. 
--- .../core/aggregation/DocumentOperators.java | 124 +++++++++++++++++- .../core/spel/MethodReferenceNode.java | 1 + .../DocumentOperatorsUnitTests.java | 17 ++- .../SpelExpressionTransformerUnitTests.java | 14 ++ 4 files changed, 153 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java index 7a5918e047..76fa591e45 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Collections; + import org.bson.Document; /** @@ -45,6 +47,26 @@ public static DenseRank denseRank() { return new DenseRank(); } + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(String fieldReference) { + return new DocumentOperatorsFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DocumentOperatorsFactory}. + */ + public static DocumentOperatorsFactory valueOf(AggregationExpression expression) { + return new DocumentOperatorsFactory(expression); + } + /** * Obtain the current document position. 
* @@ -55,6 +77,35 @@ public static DocumentNumber documentNumber() { return new DocumentNumber(); } + /** + * @author Christoph Strobl + */ + public static class DocumentOperatorsFactory { + + private Object target; + + public DocumentOperatorsFactory(Object target) { + this.target = target; + } + + /** + * Creates new {@link AggregationExpression} that applies the expression to a document at specified position + * relative to the current document. + * + * @param by the value to add to the current position. + * @return new instance of {@link Shift}. + */ + public Shift shift(int by) { + + Shift shift = usesExpression() ? Shift.shift((AggregationExpression) target) : Shift.shift(target.toString()); + return shift.by(by); + } + + private boolean usesExpression() { + return target instanceof AggregationExpression; + } + } + /** * {@link Rank} resolves the current document position (the rank) relative to other documents. If multiple documents * occupy the same rank, {@literal $rank} places the document with the subsequent value at a rank with a gap. @@ -72,8 +123,8 @@ public Document toDocument(AggregationOperationContext context) { /** * {@link DenseRank} resolves the current document position (the rank) relative to other documents. If multiple - * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next rank without - * any gaps. + * documents occupy the same rank, {@literal $denseRank} places the document with the subsequent value at the next + * rank without any gaps. * * @author Christoph Strobl * @since 3.3 @@ -99,4 +150,73 @@ public Document toDocument(AggregationOperationContext context) { return new Document("$documentNumber", new Document()); } } + + /** + * Shift applies an expression to a document in a specified position relative to the current document. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Shift extends AbstractAggregationExpression { + + private Shift(Object value) { + super(value); + } + + /** + * Specifies the field to evaluate and return. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(String fieldReference) { + return new Shift(Collections.singletonMap("output", Fields.field(fieldReference))); + } + + /** + * Specifies the {@link AggregationExpression expression} to evaluate and return. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public static Shift shift(AggregationExpression expression) { + return new Shift(Collections.singletonMap("output", expression)); + } + + /** + * Shift the document position relative to the current. Use a positive value for follow up documents (eg. 1 for the + * next) or a negative value for the predecessor documents (eg. -1 for the previous). + * + * @param shiftBy value to add to the current position. + * @return new instance of {@link Shift}. + */ + public Shift by(int shiftBy) { + return new Shift(append("by", shiftBy)); + } + + /** + * Define the default value if the target document is out of range. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Shift}. + */ + public Shift defaultTo(Object value) { + return new Shift(append("default", value)); + } + + /** + * Define the {@link AggregationExpression expression} to evaluate if the target document is out of range. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Shift}. 
+ */ + public Shift defaultToValueOf(AggregationExpression expression) { + return defaultTo(expression); + } + + @Override + protected String getMongoMethod() { + return "$shift"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index a184c49be8..4052b2cbaa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -72,6 +72,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("rank", emptyRef().forOperator("$rank")); map.put("denseRank", emptyRef().forOperator("$denseRank")); map.put("documentNumber", emptyRef().forOperator("$documentNumber")); + map.put("shift", mapArgRef().forOperator("$shift").mappingParametersTo("output", "by", "default")); // ARITHMETIC OPERATORS map.put("abs", singleArgRef().forOperator("$abs")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java index c6604bc543..4a29db60df 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -39,6 +39,21 @@ void rendersDenseRank() { @Test // GH-3717 void rendersDocumentNumber() { - assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$documentNumber", new Document())); + assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(new Document("$documentNumber", new Document())); + } + + @Test // 
GH-3727 + void rendersShift() { + + assertThat(valueOf("quantity").shift(1).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(valueOf("quantity").shift(1).defaultTo("Not available").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 3c20b58d58..334825a829 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -971,6 +971,20 @@ void shouldRenderDocumentNumber() { assertThat(transform("documentNumber()")).isEqualTo(Document.parse("{ $documentNumber : {} }")); } + @Test // GH-3727 + void rendersShift() { + + assertThat(transform("shift(quantity, 1)")) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + } + + @Test // GH-3727 + void rendersShiftWithDefault() { + + assertThat(transform("shift(quantity, 1, 'Not available')")) + .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? 
result.toString() : result); From c8a791d36769e9fdd09568def1cce8c2bd524b70 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 10:59:06 +0200 Subject: [PATCH 083/983] Polishing. Make fields final where possible. Update javadoc. Simplify assertions. Update reference docs. See: #3715, See #3717, See #3727 Original pull request: #3741. --- .../core/aggregation/DocumentOperators.java | 2 +- .../SpelExpressionTransformer.java | 2 +- .../core/spel/ExpressionTransformer.java | 2 +- .../DocumentOperatorsUnitTests.java | 15 +- .../SpelExpressionTransformerUnitTests.java | 546 +++++++++--------- src/main/asciidoc/reference/mongodb.adoc | 7 +- 6 files changed, 291 insertions(+), 283 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java index 76fa591e45..40e0065a66 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DocumentOperators.java @@ -82,7 +82,7 @@ public static DocumentNumber documentNumber() { */ public static class DocumentOperatorsFactory { - private Object target; + private final Object target; public DocumentOperatorsFactory(Object target) { this.target = target; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java index e00740945b..f47e062238 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformer.java @@ -102,7 +102,7 @@ public Object 
transform(String expression, AggregationOperationContext context, ExpressionState state = new ExpressionState(new StandardEvaluationContext(params), CONFIG); ExpressionNode node = ExpressionNode.from(spelExpression.getAST(), state); - return transform(new AggregationExpressionTransformationContext(node, null, null, context)); + return transform(new AggregationExpressionTransformationContext<>(node, null, null, context)); } /* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java index d35ed2800a..3f3b405cc6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/ExpressionTransformer.java @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core.spel; /** - * SPI interface to implement components that can transfrom an {@link ExpressionTransformationContextSupport} into an + * SPI interface to implement components that can transform an {@link ExpressionTransformationContextSupport} into an * object. 
* * @author Oliver Gierke diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java index 4a29db60df..5cd0d4271e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DocumentOperatorsUnitTests.java @@ -15,45 +15,46 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.DocumentOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; -import org.bson.Document; import org.junit.jupiter.api.Test; /** + * Unit tests for {@link DocumentOperators}. + * * @author Christoph Strobl */ class DocumentOperatorsUnitTests { @Test // GH-3715 void rendersRank() { - assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rank", new Document())); + assertThat(rank().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $rank: { } }"); } @Test // GH-3715 void rendersDenseRank() { assertThat(denseRank().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(new Document("$denseRank", new Document())); + .isEqualTo("{ $denseRank: { } }"); } @Test // GH-3717 void rendersDocumentNumber() { assertThat(documentNumber().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(new Document("$documentNumber", new Document())); + .isEqualTo("{ $documentNumber: { } }"); } @Test // GH-3727 void rendersShift() { assertThat(valueOf("quantity").shift(1).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); } @Test // GH-3727 void 
rendersShiftWithDefault() { assertThat(valueOf("quantity").shift(1).defaultTo("Not available").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 334825a829..ee55818018 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -24,6 +24,7 @@ import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import org.springframework.data.mongodb.core.Person; +import org.springframework.lang.Nullable; /** * Unit tests for {@link SpelExpressionTransformer}. 
@@ -34,12 +35,12 @@ */ public class SpelExpressionTransformerUnitTests { - SpelExpressionTransformer transformer = new SpelExpressionTransformer(); + private SpelExpressionTransformer transformer = new SpelExpressionTransformer(); - Data data; + private Data data; @BeforeEach - public void beforeEach() { + void beforeEach() { this.data = new Data(); this.data.primitiveLongValue = 42; @@ -50,118 +51,118 @@ public void beforeEach() { } @Test // DATAMONGO-774 - public void shouldRenderConstantExpression() { + void shouldRenderConstantExpression() { - assertThat(transform("1")).isEqualTo((Object) "1"); - assertThat(transform("-1")).isEqualTo((Object) "-1"); - assertThat(transform("1.0")).isEqualTo((Object) "1.0"); - assertThat(transform("-1.0")).isEqualTo((Object) "-1.0"); + assertThat(transform("1")).isEqualTo("1"); + assertThat(transform("-1")).isEqualTo("-1"); + assertThat(transform("1.0")).isEqualTo("1.0"); + assertThat(transform("-1.0")).isEqualTo("-1.0"); assertThat(transform("null")).isNull(); } @Test // DATAMONGO-774 - public void shouldSupportKnownOperands() { + void shouldSupportKnownOperands() { - assertThat(transform("a + b")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a - b")).isEqualTo((Object) Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a * b")).isEqualTo((Object) Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a / b")).isEqualTo((Object) Document.parse("{ \"$divide\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a % b")).isEqualTo((Object) Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a + b")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a - b")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a * b")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a / 
b")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a % b")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-774 - public void shouldThrowExceptionOnUnknownOperand() { + void shouldThrowExceptionOnUnknownOperand() { assertThatIllegalArgumentException().isThrownBy(() -> transform("a++")); } @Test // DATAMONGO-774 - public void shouldRenderSumExpression() { - assertThat(transform("a + 1")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a\" , 1]}")); + void shouldRenderSumExpression() { + assertThat(transform("a + 1")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 1]}")); } @Test // DATAMONGO-774 - public void shouldRenderFormula() { + void shouldRenderFormula() { - assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo((Object) Document.parse( + assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo(Document.parse( "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); } @Test // DATAMONGO-774 - public void shouldRenderFormulaInCurlyBrackets() { + void shouldRenderFormulaInCurlyBrackets() { - assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo((Object) Document.parse( + assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo(Document.parse( "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); } @Test // DATAMONGO-774 - public void shouldRenderFieldReference() { + void shouldRenderFieldReference() { - assertThat(transform("foo")).isEqualTo((Object) "$foo"); - assertThat(transform("$foo")).isEqualTo((Object) "$foo"); + assertThat(transform("foo")).isEqualTo("$foo"); + assertThat(transform("$foo")).isEqualTo("$foo"); } @Test // DATAMONGO-774 - public void shouldRenderNestedFieldReference() { + void shouldRenderNestedFieldReference() { - assertThat(transform("foo.bar")).isEqualTo((Object) 
"$foo.bar"); - assertThat(transform("$foo.bar")).isEqualTo((Object) "$foo.bar"); + assertThat(transform("foo.bar")).isEqualTo("$foo.bar"); + assertThat(transform("$foo.bar")).isEqualTo("$foo.bar"); } @Test // DATAMONGO-774 @Disabled - public void shouldRenderNestedIndexedFieldReference() { + void shouldRenderNestedIndexedFieldReference() { // TODO add support for rendering nested indexed field references - assertThat(transform("foo[3].bar")).isEqualTo((Object) "$foo[3].bar"); + assertThat(transform("foo[3].bar")).isEqualTo("$foo[3].bar"); } @Test // DATAMONGO-774 - public void shouldRenderConsecutiveOperation() { - assertThat(transform("1 + 1 + 1")).isEqualTo((Object) Document.parse("{ \"$add\" : [ 1 , 1 , 1]}")); + void shouldRenderConsecutiveOperation() { + assertThat(transform("1 + 1 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , 1]}")); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression0() { + void shouldRenderComplexExpression0() { assertThat(transform("-(1 + q)")) - .isEqualTo((Object) Document.parse("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}")); + .isEqualTo(Document.parse("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression1() { + void shouldRenderComplexExpression1() { - assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo((Object) Document.parse( + assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo(Document.parse( "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderComplexExpression2() { + void shouldRenderComplexExpression2() { - assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo((Object) Document.parse( + assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo(Document.parse( "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 
4]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { + void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { - assertThat(transform("-4 + 1")).isEqualTo((Object) Document.parse("{ \"$add\" : [ -4 , 1]}")); - assertThat(transform("1 + -4")).isEqualTo((Object) Document.parse("{ \"$add\" : [ 1 , -4]}")); + assertThat(transform("-4 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ -4 , 1]}")); + assertThat(transform("1 + -4")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , -4]}")); } @Test // DATAMONGO-774 - public void shouldRenderConsecutiveOperationsInComplexExpression() { + void shouldRenderConsecutiveOperationsInComplexExpression() { assertThat(transform("1 + 1 + (1 + 1 + 1) / q")).isEqualTo( - (Object) Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); + Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); } @Test // DATAMONGO-774 - public void shouldRenderParameterExpressionResults() { - assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo((Object) Document.parse("{ \"$add\" : [ 1 , 2 , 3]}")); + void shouldRenderParameterExpressionResults() { + assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 2 , 3]}")); } @Test // DATAMONGO-774 - public void shouldRenderNestedParameterExpressionResults() { + void shouldRenderNestedParameterExpressionResults() { assertThat( ((Document) transform("[0].primitiveLongValue + [0].primitiveDoubleValue + [0].doubleValue.longValue()", data)) @@ -171,7 +172,7 @@ public void shouldRenderNestedParameterExpressionResults() { } @Test // DATAMONGO-774 - public void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { + void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { Document target = ((Document) transform( "((1 + [0].primitiveLongValue) + [0].primitiveDoubleValue) * [0].doubleValue.longValue()", 
data)); @@ -184,765 +185,767 @@ public void shouldRenderNestedParameterExpressionResultsInNestedExpressions() { } @Test // DATAMONGO-840 - public void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { + void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); assertThat(transform("[0].age + a.c", person)) - .isEqualTo((Object) Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); + .isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); } @Test // DATAMONGO-840 - public void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { + void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { - assertThat(transform("a.b + a.c")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}")); + assertThat(transform("a.b + a.c")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeAnd() { - assertThat(transform("and(a, b)")).isEqualTo((Object) Document.parse("{ \"$and\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeAnd() { + assertThat(transform("and(a, b)")).isEqualTo(Document.parse("{ \"$and\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeOr() { - assertThat(transform("or(a, b)")).isEqualTo((Object) Document.parse("{ \"$or\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeOr() { + assertThat(transform("or(a, b)")).isEqualTo(Document.parse("{ \"$or\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeNot() { - assertThat(transform("not(a)")).isEqualTo((Object) Document.parse("{ \"$not\" : [ \"$a\"]}")); + void shouldRenderMethodReferenceNodeNot() { + assertThat(transform("not(a)")).isEqualTo(Document.parse("{ \"$not\" : [ \"$a\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEquals() { + void 
shouldRenderMethodReferenceNodeSetEquals() { assertThat(transform("setEquals(a, b)")) - .isEqualTo((Object) Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); + .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEqualsForArrays() { + void shouldRenderMethodReferenceNodeSetEqualsForArrays() { assertThat(transform("setEquals(new int[]{1,2,3}, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { + void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { assertThat(transform("setEquals(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetIntersection() { + void shouldRenderMethodReferenceSetIntersection() { assertThat(transform("setIntersection(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetUnion() { + void shouldRenderMethodReferenceSetUnion() { assertThat(transform("setUnion(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSeDifference() { + void shouldRenderMethodReferenceSeDifference() { assertThat(transform("setDifference(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ 
\"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSetIsSubset() { + void shouldRenderMethodReferenceSetIsSubset() { assertThat(transform("setIsSubset(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAnyElementTrue() { - assertThat(transform("anyElementTrue(a)")).isEqualTo((Object) Document.parse("{ \"$anyElementTrue\" : [ \"$a\"]}")); + void shouldRenderMethodReferenceAnyElementTrue() { + assertThat(transform("anyElementTrue(a)")).isEqualTo(Document.parse("{ \"$anyElementTrue\" : [ \"$a\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAllElementsTrue() { + void shouldRenderMethodReferenceAllElementsTrue() { assertThat(transform("allElementsTrue(a, new int[]{4,5,6})")) - .isEqualTo((Object) Document.parse("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo(Document.parse("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCmp() { - assertThat(transform("cmp(a, 250)")).isEqualTo((Object) Document.parse("{ \"$cmp\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceCmp() { + assertThat(transform("cmp(a, 250)")).isEqualTo(Document.parse("{ \"$cmp\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceEq() { - assertThat(transform("eq(a, 250)")).isEqualTo((Object) Document.parse("{ \"$eq\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceEq() { + assertThat(transform("eq(a, 250)")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceGt() { - assertThat(transform("gt(a, 
250)")).isEqualTo((Object) Document.parse("{ \"$gt\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceGt() { + assertThat(transform("gt(a, 250)")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceGte() { - assertThat(transform("gte(a, 250)")).isEqualTo((Object) Document.parse("{ \"$gte\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceGte() { + assertThat(transform("gte(a, 250)")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLt() { - assertThat(transform("lt(a, 250)")).isEqualTo((Object) Document.parse("{ \"$lt\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceLt() { + assertThat(transform("lt(a, 250)")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLte() { - assertThat(transform("lte(a, 250)")).isEqualTo((Object) Document.parse("{ \"$lte\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceLte() { + assertThat(transform("lte(a, 250)")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNe() { - assertThat(transform("ne(a, 250)")).isEqualTo((Object) Document.parse("{ \"$ne\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceNe() { + assertThat(transform("ne(a, 250)")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAbs() { - assertThat(transform("abs(1)")).isEqualTo((Object) Document.parse("{ \"$abs\" : 1}")); + void shouldRenderMethodReferenceAbs() { + assertThat(transform("abs(1)")).isEqualTo(Document.parse("{ \"$abs\" : 1}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAdd() { - assertThat(transform("add(a, 250)")).isEqualTo((Object) Document.parse("{ \"$add\" : [ \"$a\" , 250]}")); + void 
shouldRenderMethodReferenceAdd() { + assertThat(transform("add(a, 250)")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCeil() { - assertThat(transform("ceil(7.8)")).isEqualTo((Object) Document.parse("{ \"$ceil\" : 7.8}")); + void shouldRenderMethodReferenceCeil() { + assertThat(transform("ceil(7.8)")).isEqualTo(Document.parse("{ \"$ceil\" : 7.8}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDivide() { - assertThat(transform("divide(a, 250)")).isEqualTo((Object) Document.parse("{ \"$divide\" : [ \"$a\" , 250]}")); + void shouldRenderMethodReferenceDivide() { + assertThat(transform("divide(a, 250)")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" , 250]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceExp() { - assertThat(transform("exp(2)")).isEqualTo((Object) Document.parse("{ \"$exp\" : 2}")); + void shouldRenderMethodReferenceExp() { + assertThat(transform("exp(2)")).isEqualTo(Document.parse("{ \"$exp\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceFloor() { - assertThat(transform("floor(2)")).isEqualTo((Object) Document.parse("{ \"$floor\" : 2}")); + void shouldRenderMethodReferenceFloor() { + assertThat(transform("floor(2)")).isEqualTo(Document.parse("{ \"$floor\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLn() { - assertThat(transform("ln(2)")).isEqualTo((Object) Document.parse("{ \"$ln\" : 2}")); + void shouldRenderMethodReferenceLn() { + assertThat(transform("ln(2)")).isEqualTo(Document.parse("{ \"$ln\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLog() { - assertThat(transform("log(100, 10)")).isEqualTo((Object) Document.parse("{ \"$log\" : [ 100 , 10]}")); + void shouldRenderMethodReferenceLog() { + assertThat(transform("log(100, 10)")).isEqualTo(Document.parse("{ \"$log\" : [ 100 , 10]}")); } @Test // DATAMONGO-1530 - public void 
shouldRenderMethodReferenceLog10() { - assertThat(transform("log10(100)")).isEqualTo((Object) Document.parse("{ \"$log10\" : 100}")); + void shouldRenderMethodReferenceLog10() { + assertThat(transform("log10(100)")).isEqualTo(Document.parse("{ \"$log10\" : 100}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMod() { - assertThat(transform("mod(a, b)")).isEqualTo((Object) Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMod() { + assertThat(transform("mod(a, b)")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMultiply() { - assertThat(transform("multiply(a, b)")).isEqualTo((Object) Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMultiply() { + assertThat(transform("multiply(a, b)")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodePow() { - assertThat(transform("pow(a, 2)")).isEqualTo((Object) Document.parse("{ \"$pow\" : [ \"$a\" , 2]}")); + void shouldRenderMethodReferenceNodePow() { + assertThat(transform("pow(a, 2)")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$a\" , 2]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSqrt() { - assertThat(transform("sqrt(2)")).isEqualTo((Object) Document.parse("{ \"$sqrt\" : 2}")); + void shouldRenderMethodReferenceSqrt() { + assertThat(transform("sqrt(2)")).isEqualTo(Document.parse("{ \"$sqrt\" : 2}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSubtract() { - assertThat(transform("subtract(a, b)")).isEqualTo((Object) Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeSubtract() { + assertThat(transform("subtract(a, b)")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void 
shouldRenderMethodReferenceTrunc() { - assertThat(transform("trunc(2.1)")).isEqualTo((Object) Document.parse("{ \"$trunc\" : 2.1}")); + void shouldRenderMethodReferenceTrunc() { + assertThat(transform("trunc(2.1)")).isEqualTo(Document.parse("{ \"$trunc\" : 2.1}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeConcat() { + void shouldRenderMethodReferenceNodeConcat() { assertThat(transform("concat(a, b, 'c')")) - .isEqualTo((Object) Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); + .isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSubstrc() { - assertThat(transform("substr(a, 0, 1)")).isEqualTo((Object) Document.parse("{ \"$substr\" : [ \"$a\" , 0 , 1]}")); + void shouldRenderMethodReferenceNodeSubstrc() { + assertThat(transform("substr(a, 0, 1)")).isEqualTo(Document.parse("{ \"$substr\" : [ \"$a\" , 0 , 1]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceToLower() { - assertThat(transform("toLower(a)")).isEqualTo((Object) Document.parse("{ \"$toLower\" : \"$a\"}")); + void shouldRenderMethodReferenceToLower() { + assertThat(transform("toLower(a)")).isEqualTo(Document.parse("{ \"$toLower\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceToUpper() { - assertThat(transform("toUpper(a)")).isEqualTo((Object) Document.parse("{ \"$toUpper\" : \"$a\"}")); + void shouldRenderMethodReferenceToUpper() { + assertThat(transform("toUpper(a)")).isEqualTo(Document.parse("{ \"$toUpper\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStrCaseCmp() { + void shouldRenderMethodReferenceNodeStrCaseCmp() { assertThat(transform("strcasecmp(a, b)")) - .isEqualTo((Object) Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); + .isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void 
shouldRenderMethodReferenceMeta() { - assertThat(transform("meta('textScore')")).isEqualTo((Object) Document.parse("{ \"$meta\" : \"textScore\"}")); + void shouldRenderMethodReferenceMeta() { + assertThat(transform("meta('textScore')")).isEqualTo(Document.parse("{ \"$meta\" : \"textScore\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeArrayElemAt() { + void shouldRenderMethodReferenceNodeArrayElemAt() { assertThat(transform("arrayElemAt(a, 10)")) - .isEqualTo((Object) Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); + .isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeConcatArrays() { + void shouldRenderMethodReferenceNodeConcatArrays() { assertThat(transform("concatArrays(a, b, c)")) - .isEqualTo((Object) Document.parse("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}")); + .isEqualTo(Document.parse("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeFilter() { - assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo((Object) Document.parse( + void shouldRenderMethodReferenceNodeFilter() { + assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo(Document.parse( "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceIsArray() { - assertThat(transform("isArray(a)")).isEqualTo((Object) Document.parse("{ \"$isArray\" : \"$a\"}")); + void shouldRenderMethodReferenceIsArray() { + assertThat(transform("isArray(a)")).isEqualTo(Document.parse("{ \"$isArray\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceIsSize() { - assertThat(transform("size(a)")).isEqualTo((Object) Document.parse("{ \"$size\" : \"$a\"}")); + void shouldRenderMethodReferenceIsSize() { + 
assertThat(transform("size(a)")).isEqualTo(Document.parse("{ \"$size\" : \"$a\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSlice() { - assertThat(transform("slice(a, 10)")).isEqualTo((Object) Document.parse("{ \"$slice\" : [ \"$a\" , 10]}")); + void shouldRenderMethodReferenceNodeSlice() { + assertThat(transform("slice(a, 10)")).isEqualTo(Document.parse("{ \"$slice\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMap() { - assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo((Object) Document.parse( + void shouldRenderMethodReferenceNodeMap() { + assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo(Document.parse( "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeLet() { - assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo((Object) Document.parse( + void shouldRenderMethodReferenceNodeLet() { + assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo(Document.parse( "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLiteral() { - assertThat(transform("literal($1)")).isEqualTo((Object) Document.parse("{ \"$literal\" : \"$1\"}")); + void shouldRenderMethodReferenceLiteral() { + assertThat(transform("literal($1)")).isEqualTo(Document.parse("{ \"$literal\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfYear() { - assertThat(transform("dayOfYear($1)")).isEqualTo((Object) Document.parse("{ \"$dayOfYear\" : \"$1\"}")); + void shouldRenderMethodReferenceDayOfYear() { + assertThat(transform("dayOfYear($1)")).isEqualTo(Document.parse("{ \"$dayOfYear\" : \"$1\"}")); } @Test // DATAMONGO-1530 - 
public void shouldRenderMethodReferenceDayOfMonth() { - assertThat(transform("dayOfMonth($1)")).isEqualTo((Object) Document.parse("{ \"$dayOfMonth\" : \"$1\"}")); + void shouldRenderMethodReferenceDayOfMonth() { + assertThat(transform("dayOfMonth($1)")).isEqualTo(Document.parse("{ \"$dayOfMonth\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDayOfWeek() { - assertThat(transform("dayOfWeek($1)")).isEqualTo((Object) Document.parse("{ \"$dayOfWeek\" : \"$1\"}")); + void shouldRenderMethodReferenceDayOfWeek() { + assertThat(transform("dayOfWeek($1)")).isEqualTo(Document.parse("{ \"$dayOfWeek\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceYear() { - assertThat(transform("year($1)")).isEqualTo((Object) Document.parse("{ \"$year\" : \"$1\"}")); + void shouldRenderMethodReferenceYear() { + assertThat(transform("year($1)")).isEqualTo(Document.parse("{ \"$year\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMonth() { - assertThat(transform("month($1)")).isEqualTo((Object) Document.parse("{ \"$month\" : \"$1\"}")); + void shouldRenderMethodReferenceMonth() { + assertThat(transform("month($1)")).isEqualTo(Document.parse("{ \"$month\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceWeek() { - assertThat(transform("week($1)")).isEqualTo((Object) Document.parse("{ \"$week\" : \"$1\"}")); + void shouldRenderMethodReferenceWeek() { + assertThat(transform("week($1)")).isEqualTo(Document.parse("{ \"$week\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceHour() { - assertThat(transform("hour($1)")).isEqualTo((Object) Document.parse("{ \"$hour\" : \"$1\"}")); + void shouldRenderMethodReferenceHour() { + assertThat(transform("hour($1)")).isEqualTo(Document.parse("{ \"$hour\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMinute() { - 
assertThat(transform("minute($1)")).isEqualTo((Object) Document.parse("{ \"$minute\" : \"$1\"}")); + void shouldRenderMethodReferenceMinute() { + assertThat(transform("minute($1)")).isEqualTo(Document.parse("{ \"$minute\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceSecond() { - assertThat(transform("second($1)")).isEqualTo((Object) Document.parse("{ \"$second\" : \"$1\"}")); + void shouldRenderMethodReferenceSecond() { + assertThat(transform("second($1)")).isEqualTo(Document.parse("{ \"$second\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceMillisecond() { - assertThat(transform("millisecond($1)")).isEqualTo((Object) Document.parse("{ \"$millisecond\" : \"$1\"}")); + void shouldRenderMethodReferenceMillisecond() { + assertThat(transform("millisecond($1)")).isEqualTo(Document.parse("{ \"$millisecond\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceDateToString() { + void shouldRenderMethodReferenceDateToString() { assertThat(transform("dateToString('%Y-%m-%d', $date)")).isEqualTo( - (Object) Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); + Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceCond() { - assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo((Object) Document + void shouldRenderMethodReferenceCond() { + assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( + Document .parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeIfNull() { - assertThat(transform("ifNull(a, 10)")).isEqualTo((Object) Document.parse("{ \"$ifNull\" : [ \"$a\" , 10]}")); + void shouldRenderMethodReferenceNodeIfNull() { + assertThat(transform("ifNull(a, 10)")).isEqualTo(Document.parse("{ 
\"$ifNull\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeSum() { - assertThat(transform("sum(a, b)")).isEqualTo((Object) Document.parse("{ \"$sum\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeSum() { + assertThat(transform("sum(a, b)")).isEqualTo(Document.parse("{ \"$sum\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeAvg() { - assertThat(transform("avg(a, b)")).isEqualTo((Object) Document.parse("{ \"$avg\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeAvg() { + assertThat(transform("avg(a, b)")).isEqualTo(Document.parse("{ \"$avg\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceFirst() { - assertThat(transform("first($1)")).isEqualTo((Object) Document.parse("{ \"$first\" : \"$1\"}")); + void shouldRenderMethodReferenceFirst() { + assertThat(transform("first($1)")).isEqualTo(Document.parse("{ \"$first\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceLast() { - assertThat(transform("last($1)")).isEqualTo((Object) Document.parse("{ \"$last\" : \"$1\"}")); + void shouldRenderMethodReferenceLast() { + assertThat(transform("last($1)")).isEqualTo(Document.parse("{ \"$last\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMax() { - assertThat(transform("max(a, b)")).isEqualTo((Object) Document.parse("{ \"$max\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMax() { + assertThat(transform("max(a, b)")).isEqualTo(Document.parse("{ \"$max\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeMin() { - assertThat(transform("min(a, b)")).isEqualTo((Object) Document.parse("{ \"$min\" : [ \"$a\" , \"$b\"]}")); + void shouldRenderMethodReferenceNodeMin() { + assertThat(transform("min(a, b)")).isEqualTo(Document.parse("{ \"$min\" : [ \"$a\" , \"$b\"]}")); } @Test // 
DATAMONGO-1530 - public void shouldRenderMethodReferenceNodePush() { + void shouldRenderMethodReferenceNodePush() { assertThat(transform("push({'item':'$item', 'quantity':'$qty'})")) - .isEqualTo((Object) Document.parse("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}")); + .isEqualTo(Document.parse("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceAddToSet() { - assertThat(transform("addToSet($1)")).isEqualTo((Object) Document.parse("{ \"$addToSet\" : \"$1\"}")); + void shouldRenderMethodReferenceAddToSet() { + assertThat(transform("addToSet($1)")).isEqualTo(Document.parse("{ \"$addToSet\" : \"$1\"}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStdDevPop() { + void shouldRenderMethodReferenceNodeStdDevPop() { assertThat(transform("stdDevPop(scores.score)")) - .isEqualTo((Object) Document.parse("{ \"$stdDevPop\" : [ \"$scores.score\"]}")); + .isEqualTo(Document.parse("{ \"$stdDevPop\" : [ \"$scores.score\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderMethodReferenceNodeStdDevSamp() { - assertThat(transform("stdDevSamp(age)")).isEqualTo((Object) Document.parse("{ \"$stdDevSamp\" : [ \"$age\"]}")); + void shouldRenderMethodReferenceNodeStdDevSamp() { + assertThat(transform("stdDevSamp(age)")).isEqualTo(Document.parse("{ \"$stdDevSamp\" : [ \"$age\"]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeEq() { - assertThat(transform("foo == 10")).isEqualTo((Object) Document.parse("{ \"$eq\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeEq() { + assertThat(transform("foo == 10")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeNe() { - assertThat(transform("foo != 10")).isEqualTo((Object) Document.parse("{ \"$ne\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeNe() { + assertThat(transform("foo != 
10")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeGt() { - assertThat(transform("foo > 10")).isEqualTo((Object) Document.parse("{ \"$gt\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeGt() { + assertThat(transform("foo > 10")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeGte() { - assertThat(transform("foo >= 10")).isEqualTo((Object) Document.parse("{ \"$gte\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeGte() { + assertThat(transform("foo >= 10")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeLt() { - assertThat(transform("foo < 10")).isEqualTo((Object) Document.parse("{ \"$lt\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeLt() { + assertThat(transform("foo < 10")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeLte() { - assertThat(transform("foo <= 10")).isEqualTo((Object) Document.parse("{ \"$lte\" : [ \"$foo\" , 10]}")); + void shouldRenderOperationNodeLte() { + assertThat(transform("foo <= 10")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$foo\" , 10]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodePow() { - assertThat(transform("foo^2")).isEqualTo((Object) Document.parse("{ \"$pow\" : [ \"$foo\" , 2]}")); + void shouldRenderOperationNodePow() { + assertThat(transform("foo^2")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$foo\" , 2]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeOr() { - assertThat(transform("true || false")).isEqualTo((Object) Document.parse("{ \"$or\" : [ true , false]}")); + void shouldRenderOperationNodeOr() { + assertThat(transform("true || false")).isEqualTo(Document.parse("{ \"$or\" : [ true , false]}")); } @Test // DATAMONGO-1530 - public void 
shouldRenderComplexOperationNodeOr() { + void shouldRenderComplexOperationNodeOr() { assertThat(transform("1+2 || concat(a, b) || true")).isEqualTo( - (Object) Document.parse("{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + Document.parse("{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); } @Test // DATAMONGO-1530 - public void shouldRenderOperationNodeAnd() { - assertThat(transform("true && false")).isEqualTo((Object) Document.parse("{ \"$and\" : [ true , false]}")); + void shouldRenderOperationNodeAnd() { + assertThat(transform("true && false")).isEqualTo(Document.parse("{ \"$and\" : [ true , false]}")); } @Test // DATAMONGO-1530 - public void shouldRenderComplexOperationNodeAnd() { - assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo((Object) Document + void shouldRenderComplexOperationNodeAnd() { + assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( + Document .parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); } @Test // DATAMONGO-1530 - public void shouldRenderNotCorrectly() { - assertThat(transform("!true")).isEqualTo((Object) Document.parse("{ \"$not\" : [ true]}")); + void shouldRenderNotCorrectly() { + assertThat(transform("!true")).isEqualTo(Document.parse("{ \"$not\" : [ true]}")); } @Test // DATAMONGO-1530 - public void shouldRenderComplexNotCorrectly() { + void shouldRenderComplexNotCorrectly() { assertThat(transform("!(foo > 10)")) - .isEqualTo((Object) Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); + .isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceIndexOfBytes() { + void shouldRenderMethodReferenceIndexOfBytes() { assertThat(transform("indexOfBytes(item, 'foo')")) .isEqualTo(Document.parse("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}")); } @Test // DATAMONGO-1548 - public void 
shouldRenderMethodReferenceIndexOfCP() { + void shouldRenderMethodReferenceIndexOfCP() { assertThat(transform("indexOfCP(item, 'foo')")) .isEqualTo(Document.parse("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceSplit() { + void shouldRenderMethodReferenceSplit() { assertThat(transform("split(item, ',')")).isEqualTo(Document.parse("{ \"$split\" : [ \"$item\" , \",\"]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceStrLenBytes() { + void shouldRenderMethodReferenceStrLenBytes() { assertThat(transform("strLenBytes(item)")).isEqualTo(Document.parse("{ \"$strLenBytes\" : \"$item\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceStrLenCP() { + void shouldRenderMethodReferenceStrLenCP() { assertThat(transform("strLenCP(item)")).isEqualTo(Document.parse("{ \"$strLenCP\" : \"$item\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodSubstrCP() { + void shouldRenderMethodSubstrCP() { assertThat(transform("substrCP(item, 0, 5)")).isEqualTo(Document.parse("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceReverseArray() { + void shouldRenderMethodReferenceReverseArray() { assertThat(transform("reverseArray(array)")).isEqualTo(Document.parse("{ \"$reverseArray\" : \"$array\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceReduce() { + void shouldRenderMethodReferenceReduce() { assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo(Document.parse( "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceZip() { + void shouldRenderMethodReferenceZip() { assertThat(transform("zip(new String[]{'$array1', '$array2'})")) .isEqualTo(Document.parse("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , 
\"$array2\"]}}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodReferenceZipWithOptionalArgs() { + void shouldRenderMethodReferenceZipWithOptionalArgs() { assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo(Document.parse( "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodIn() { + void shouldRenderMethodIn() { assertThat(transform("in('item', array)")).isEqualTo(Document.parse("{ \"$in\" : [ \"item\" , \"$array\"]}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoDayOfWeek() { + void shouldRenderMethodRefereneIsoDayOfWeek() { assertThat(transform("isoDayOfWeek(date)")).isEqualTo(Document.parse("{ \"$isoDayOfWeek\" : \"$date\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoWeek() { + void shouldRenderMethodRefereneIsoWeek() { assertThat(transform("isoWeek(date)")).isEqualTo(Document.parse("{ \"$isoWeek\" : \"$date\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneIsoWeekYear() { + void shouldRenderMethodRefereneIsoWeekYear() { assertThat(transform("isoWeekYear(date)")).isEqualTo(Document.parse("{ \"$isoWeekYear\" : \"$date\"}")); } @Test // DATAMONGO-1548 - public void shouldRenderMethodRefereneType() { + void shouldRenderMethodRefereneType() { assertThat(transform("type(a)")).isEqualTo(Document.parse("{ \"$type\" : \"$a\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderArrayToObjectWithFieldReference() { + void shouldRenderArrayToObjectWithFieldReference() { assertThat(transform("arrayToObject(field)")).isEqualTo(Document.parse("{ \"$arrayToObject\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderArrayToObjectWithArray() { + void shouldRenderArrayToObjectWithArray() { assertThat(transform("arrayToObject(new String[]{'key', 'value'})")) .isEqualTo(Document.parse("{ 
\"$arrayToObject\" : [\"key\", \"value\"]}")); } @Test // DATAMONGO-2077 - public void shouldRenderObjectToArrayWithFieldReference() { + void shouldRenderObjectToArrayWithFieldReference() { assertThat(transform("objectToArray(field)")).isEqualTo(Document.parse("{ \"$objectToArray\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderMergeObjects() { + void shouldRenderMergeObjects() { assertThat(transform("mergeObjects(field1, $$ROOT)")) .isEqualTo(Document.parse("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}")); } @Test // DATAMONGO-2077 - public void shouldRenderTrimWithoutChars() { + void shouldRenderTrimWithoutChars() { assertThat(transform("trim(field)")).isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderTrimWithChars() { + void shouldRenderTrimWithChars() { assertThat(transform("trim(field, 'ie')")) .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderTrimWithCharsFromFieldReference() { + void shouldRenderTrimWithCharsFromFieldReference() { assertThat(transform("trim(field1, field2)")) .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderLtrimWithoutChars() { + void shouldRenderLtrimWithoutChars() { assertThat(transform("ltrim(field)")).isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderLtrimWithChars() { + void shouldRenderLtrimWithChars() { assertThat(transform("ltrim(field, 'ie')")) .isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderLtrimWithCharsFromFieldReference() { + void shouldRenderLtrimWithCharsFromFieldReference() { assertThat(transform("ltrim(field1, field2)")) .isEqualTo(Document.parse("{ \"$ltrim\" : 
{\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderRtrimWithoutChars() { + void shouldRenderRtrimWithoutChars() { assertThat(transform("rtrim(field)")).isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderRtrimWithChars() { + void shouldRenderRtrimWithChars() { assertThat(transform("rtrim(field, 'ie')")) .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderRtrimWithCharsFromFieldReference() { + void shouldRenderRtrimWithCharsFromFieldReference() { assertThat(transform("rtrim(field1, field2)")) .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderConvertWithoutOptionalParameters() { + void shouldRenderConvertWithoutOptionalParameters() { assertThat(transform("convert(field, 'string')")) .isEqualTo(Document.parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderConvertWithOnError() { + void shouldRenderConvertWithOnError() { assertThat(transform("convert(field, 'int', 'Not an integer.')")).isEqualTo(Document .parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderConvertWithOnErrorOnNull() { + void shouldRenderConvertWithOnErrorOnNull() { assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo(Document.parse( "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\", \"onNull\" : -1 }}")); } @Test // DATAMONGO-2077 - public void shouldRenderToBool() { + void shouldRenderToBool() { assertThat(transform("toBool(field)")).isEqualTo(Document.parse("{ \"$toBool\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void 
shouldRenderToDate() { + void shouldRenderToDate() { assertThat(transform("toDate(field)")).isEqualTo(Document.parse("{ \"$toDate\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToDecimal() { + void shouldRenderToDecimal() { assertThat(transform("toDecimal(field)")).isEqualTo(Document.parse("{ \"$toDecimal\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToDouble() { + void shouldRenderToDouble() { assertThat(transform("toDouble(field)")).isEqualTo(Document.parse("{ \"$toDouble\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToInt() { + void shouldRenderToInt() { assertThat(transform("toInt(field)")).isEqualTo(Document.parse("{ \"$toInt\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToLong() { + void shouldRenderToLong() { assertThat(transform("toLong(field)")).isEqualTo(Document.parse("{ \"$toLong\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToObjectId() { + void shouldRenderToObjectId() { assertThat(transform("toObjectId(field)")).isEqualTo(Document.parse("{ \"$toObjectId\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderToString() { + void shouldRenderToString() { assertThat(transform("toString(field)")).isEqualTo(Document.parse("{ \"$toString\" : \"$field\"}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithoutOptionalParameters() { + void shouldRenderDateFromStringWithoutOptionalParameters() { assertThat(transform("dateFromString(field)")) .isEqualTo(Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithFormat() { + void shouldRenderDateFromStringWithFormat() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY')")).isEqualTo( Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}")); } @Test // DATAMONGO-2077 - public void 
shouldRenderDateFromStringWithFormatAndTimezone() { + void shouldRenderDateFromStringWithFormatAndTimezone() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo(Document.parse( "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { + void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo(Document.parse( "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { + void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo(Document.parse( "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, \"onNull\" : -2}}")); } @Test // DATAMONGO-2077, DATAMONGO-2671 - public void shouldRenderDateFromParts() { + void shouldRenderDateFromParts() { assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}")); } @Test // DATAMONGO-2077, DATAMONGO-2671 - public void shouldRenderIsoDateFromParts() { + void shouldRenderIsoDateFromParts() { assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : 
\"UTC\"}}")); } @Test // DATAMONGO-2077 - public void shouldRenderDateToParts() { + void shouldRenderDateToParts() { assertThat(transform("dateToParts(field, 'UTC', false)")).isEqualTo( Document.parse("{ \"$dateToParts\" : {\"date\" : \"$field\", \"timezone\" : \"UTC\", \"iso8601\" : false}}")); } @Test // DATAMONGO-2077 - public void shouldRenderIndexOfArray() { + void shouldRenderIndexOfArray() { assertThat(transform("indexOfArray(field, 2)")) .isEqualTo(Document.parse("{ \"$indexOfArray\" : [\"$field\", 2 ]}")); } @Test // DATAMONGO-2077 - public void shouldRenderRange() { + void shouldRenderRange() { assertThat(transform("range(0, 10, 2)")).isEqualTo(Document.parse("{ \"$range\" : [0, 10, 2 ]}")); } @Test // DATAMONGO-2370 - public void shouldRenderRound() { + void shouldRenderRound() { assertThat(transform("round(field)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\"]}")); } @Test // DATAMONGO-2370 - public void shouldRenderRoundWithPlace() { + void shouldRenderRoundWithPlace() { assertThat(transform("round(field, 2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); } @@ -985,6 +988,7 @@ void rendersShiftWithDefault() { .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); } + @Nullable private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? 
result.toString() : result); diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index dfa87dd3c7..daaad49963 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -2506,13 +2506,16 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (*via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` +| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` + +| Document Operators +| `rank`, `denseRank`, `documentNumber`, `shift` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators -| `eq` (*via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` +| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` | Array Aggregation Operators | `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` From 0c481feb722d3d301445d8135633363b24c1abeb Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 11:06:41 +0200 Subject: [PATCH 084/983] Extract Aggregation Framework and GridFS docs in own source files. 
Closes #3786 --- .../reference/aggregation-framework.adoc | 656 +++++++++++++++ src/main/asciidoc/reference/gridfs.adoc | 115 +++ src/main/asciidoc/reference/mongodb.adoc | 776 +----------------- 3 files changed, 774 insertions(+), 773 deletions(-) create mode 100644 src/main/asciidoc/reference/aggregation-framework.adoc create mode 100644 src/main/asciidoc/reference/gridfs.adoc diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc new file mode 100644 index 0000000000..a843af17f4 --- /dev/null +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -0,0 +1,656 @@ +[[mongo.aggregation]] +== Aggregation Framework Support + +Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. + +For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB. + +[[mongo.aggregation.basic-concepts]] +=== Basic Concepts + +The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationDefinition`, and `AggregationResults`. + +* `Aggregation` ++ +An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregateOperation` and an optional input class. ++ +The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter. ++ +* `TypedAggregation` ++ +A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type, that is used for mapping domain properties to actual document fields. 
++ +At runtime, field references get checked against the given input type, considering potential `@Field` annotations. +[NOTE] +==== +Changed in 3.2: referencing non-existent properties no longer raises errors. To restore the previous behaviour, use the `strictMapping` option of `AggregationOptions`. +==== +* `AggregationDefinition` ++ +An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregation` class to construct an `AggregationOperation`. ++ +* `AggregationResults` ++ +`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result, in the form of a `Document`, to the mapped objects and other information about the aggregation. ++ +The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework: ++ +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +Aggregation agg = newAggregation( + pipelineOP1(), + pipelineOP2(), + pipelineOPn() +); + +AggregationResults results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class); +List mappedResult = results.getMappedResults(); +---- + +Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence.
+ +[[mongo.aggregation.supported-aggregation-operations]] +=== Supported Aggregation Operations + +The MongoDB Aggregation Framework provides the following types of aggregation operations: + +* Pipeline Aggregation Operators +* Group Aggregation Operators +* Boolean Aggregation Operators +* Comparison Aggregation Operators +* Arithmetic Aggregation Operators +* String Aggregation Operators +* Date Aggregation Operators +* Array Aggregation Operators +* Conditional Aggregation Operators +* Lookup Aggregation Operators +* Convert Aggregation Operators +* Object Aggregation Operators +* Script Aggregation Operators + +At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB: + +.Aggregation Operations currently supported by Spring Data MongoDB +[cols="2*"] +|=== +| Pipeline Aggregation Operators +| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind` + +| Set Aggregation Operators +| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` + +| Group/Accumulator Aggregation Operators +| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` + +| Arithmetic Aggregation Operators +| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (via `minus`), `trunc` + +| Document Operators +| `rank`, `denseRank`, `documentNumber`, `shift` + +| String Aggregation Operators +| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtrim` + +| Comparison Aggregation Operators +| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` + +| Array Aggregation Operators +| `arrayElementAt`, `arrayToObject`,
`concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` + +| Literal Operators +| `literal` + +| Date Aggregation Operators +| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` + +| Variable Operators +| `map` + +| Conditional Aggregation Operators +| `cond`, `ifNull`, `switch` + +| Type Aggregation Operators +| `type` + +| Convert Aggregation Operators +| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` + +| Object Aggregation Operators +| `objectToArray`, `mergeObjects` + +| Script Aggregation Operators +| `function`, `accumulator` +|=== + +* The operation is mapped or added by Spring Data MongoDB. + +Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. + +[[mongo.aggregation.projection]] +=== Projection Expressions + +Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method. +Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). 
Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expressions: + +.Projection expression examples +==== +[source,java] +---- +// generates {$project: {name: 1, netPrice: 1}} +project("name", "netPrice") + +// generates {$project: {thing2: $thing1}} +project().and("thing1").as("thing2") + +// generates {$project: {a: 1, b: 1, thing2: $thing1}} +project("a","b").and("thing1").as("thing2") +---- +==== + +.Multi-Stage Aggregation using Projection and Sorting +==== +[source,java] +---- +// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}} +project("name", "netPrice"), sort(ASC, "name") + +// generates {$project: {name: $firstname}}, {$sort: {name: 1}} +project().and("firstname").as("name"), sort(ASC, "name") + +// does not work +project().and("firstname").as("name"), sort(ASC, "firstname") +---- +==== + +More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation. + +[[mongo.aggregation.facet]] +=== Faceted Classification + +As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times. + +==== Buckets + +Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. 
Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output. + +`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations: + +.Bucket operation examples +==== +[source,java] +---- +// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}} +bucket("price").withBoundaries(0, 100, 400); + +// generates {$bucket: {groupBy: $price, default: "Other" boundaries: [0, 100]}} +bucket("price").withBoundaries(0, 100).withDefault("Other"); + +// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}} +bucket("price").withBoundaries(0, 100).andOutputCount().as("count"); + +// generates {$bucket: {groupBy: $price, boundaries: [0, 100], 5, output: { titles: { $push: "$title"}}} +bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles"); +---- +==== + +`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. 
The following listing shows examples of bucket operations: + +.Bucket operation examples +==== +[source,java] +---- +// generates {$bucketAuto: {groupBy: $price, buckets: 5}} +bucketAuto("price", 5) + +// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}} +bucketAuto("price", 5).withGranularity(Granularities.E24).withDefault("Other"); + +// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}} +bucketAuto("price", 5).andOutput("title").push().as("titles"); +---- +==== + +To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <> through `andOutputExpression()`. + +Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and +https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation. + +==== Multi-faceted Aggregation + +Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors. + +You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents. + +Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. 
The following listing shows facet operation examples: + +.Facet operation examples +==== +[source,java] +---- +// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}} +facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")) + +// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}} +facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")) + +// generates {$facet: {categorizedByYear: [ +// { $project: { title: 1, publicationYear: { $year: "publicationDate"}}}, +// { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}} +// ]}} +facet(project("title").and("publicationDate").extractYear().as("publicationYear"), + bucketAuto("publicationYear", 5).andOutput("title").push().as("titles")) + .as("categorizedByYear")) +---- +==== + +Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation. + +[[mongo.aggregation.sort-by-count]] +==== Sort By Count + +Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using <>. Sort by count operations require a grouping field or grouping expression. 
The following listing shows a sort by count example: + +.Sort by count example +==== +[source,java] +---- +// generates { $sortByCount: "$country" } +sortByCount("country"); +---- +==== + +A sort by count operation is equivalent to the following BSON (Binary JSON): + +---- +{ $group: { _id: , count: { $sum: 1 } } }, +{ $sort: { count: -1 } } +---- + +[[mongo.aggregation.projection.expressions]] +==== Spring Expression Support in Projection Expressions + +We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations. + +===== Complex Calculations with SpEL expressions + +Consider the following SpEL expression: + +[source,java] +---- +1 + (q + 1) / (q - 1) +---- + +The preceding expression is translated into the following projection expression part: + +[source,javascript] +---- +{ "$add" : [ 1, { + "$divide" : [ { + "$add":["$q", 1]}, { + "$subtract":[ "$q", 1]} + ] +}]} +---- + +You can see examples in more context in <> and <>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. 
The following table shows the SpEL transformations supported by Spring Data MongoDB: + +.Supported SpEL transformations +[%header,cols="2"] +|=== +| SpEL Expression +| Mongo Expression Part +| a == b +| { $eq : [$a, $b] } +| a != b +| { $ne : [$a , $b] } +| a > b +| { $gt : [$a, $b] } +| a >= b +| { $gte : [$a, $b] } +| a < b +| { $lt : [$a, $b] } +| a <= b +| { $lte : [$a, $b] } +| a + b +| { $add : [$a, $b] } +| a - b +| { $subtract : [$a, $b] } +| a * b +| { $multiply : [$a, $b] } +| a / b +| { $divide : [$a, $b] } +| a^b +| { $pow : [$a, $b] } +| a % b +| { $mod : [$a, $b] } +| a && b +| { $and : [$a, $b] } +| a \|\| b +| { $or : [$a, $b] } +| !a +| { $not : [$a] } +|=== + +In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion: + +[source,java] +---- +// { $setEquals : [$a, [5, 8, 13] ] } +.andExpression("setEquals(a, new int[]{5, 8, 13})"); +---- + +[[mongo.aggregation.examples]] +==== Aggregation Framework Examples + +The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB. + +[[mongo.aggregation.examples.example1]] +===== Aggregation Framework Example 1 + +In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting). 
+ +[source,java] +---- +class TagCount { + String tag; + int n; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +Aggregation agg = newAggregation( + project("tags"), + unwind("tags"), + group("tags").count().as("n"), + project("n").and("tag").previousOperation(), + sort(DESC, "n") +); + +AggregationResults results = mongoTemplate.aggregate(agg, "tags", TagCount.class); +List tagCount = results.getMappedResults(); +---- + +The preceding listing uses the following algorithm: + +. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`. +. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection. +. Use the `unwind` operation to generate a new document for each tag within the `tags` array. +. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`). +. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`. +. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order. +. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument. + +Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggregation` method. 
+ +[[mongo.aggregation.examples.example2]] +===== Aggregation Framework Example 2 + +This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection). + +[source,java] +---- +class ZipInfo { + String id; + String city; + String state; + @Field("pop") int population; + @Field("loc") double[] location; +} + +class City { + String name; + int population; +} + +class ZipInfoStats { + String id; + String state; + City biggestCity; + City smallestCity; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation aggregation = newAggregation(ZipInfo.class, + group("state", "city") + .sum("population").as("pop"), + sort(ASC, "pop", "state", "city"), + group("state") + .last("city").as("biggestCity") + .last("pop").as("biggestPop") + .first("city").as("smallestCity") + .first("pop").as("smallestPop"), + project() + .and("state").previousOperation() + .and("biggestCity") + .nested(bind("name", "biggestCity").and("population", "biggestPop")) + .and("smallestCity") + .nested(bind("name", "smallestCity").and("population", "smallestPop")), + sort(ASC, "state") +); + +AggregationResults result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); +ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); +---- + +Note that the `ZipInfo` class maps the structure of the given input-collection. The `ZipInfoStats` class defines the structure in the desired output format. + +The preceding listings use the following algorithm: + +. 
Use the `group` operation to define a group from the input-collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field. +. Use the `sort` operation to sort the intermediate-result by the `pop`, `state` and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handled). +. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(...)` operators, respectively, in the `project` operation. +. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method. +. Sort the resulting list of `StateStats` by their state name in ascending order in the `sort` operation. + +Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method. 
+ +[[mongo.aggregation.examples.example3]] +===== Aggregation Framework Example 3 + +This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering). + +[source,java] +---- +class StateStats { + @Id String id; + String state; + @Field("totalPop") int totalPopulation; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(ZipInfo.class, + group("state").sum("population").as("totalPop"), + sort(ASC, previousOperation(), "totalPop"), + match(where("totalPop").gte(10 * 1000 * 1000)) +); + +AggregationResults result = mongoTemplate.aggregate(agg, StateStats.class); +List stateStatsList = result.getMappedResults(); +---- + +The preceding listings use the following algorithm: + +. Group the input collection by the `state` field and calculate the sum of the `population` field and store the result in the new field `"totalPop"`. +. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order. +. Filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument. + +Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method. + +[[mongo.aggregation.examples.example4]] +===== Aggregation Framework Example 4 + +This example demonstrates the use of simple arithmetic operations in the projection operation. 
+ +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .and("netPrice").plus(1).as("netPricePlus1") + .and("netPrice").minus(1).as("netPriceMinus1") + .and("netPrice").multiply(1.19).as("grossPrice") + .and("netPrice").divide(2).as("netPriceDiv2") + .and("spaceUnits").mod(2).as("spaceUnitsMod2") +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method. + +[[mongo.aggregation.examples.example5]] +===== Aggregation Framework Example 5 + +This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation. 
+ +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .andExpression("netPrice + 1").as("netPricePlus1") + .andExpression("netPrice - 1").as("netPriceMinus1") + .andExpression("netPrice / 2").as("netPriceDiv2") + .andExpression("netPrice * 1.19").as("grossPrice") + .andExpression("spaceUnits % 2").as("spaceUnitsMod2") + .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge") + +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +[[mongo.aggregation.examples.example6]] +===== Aggregation Framework Example 6 + +This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation. + +Note: The additional parameters passed to the `addExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values. 
+ +[source,java] +---- +class Product { + String id; + String name; + double netPrice; + int spaceUnits; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +double shippingCosts = 1.2; + +TypedAggregation agg = newAggregation(Product.class, + project("name", "netPrice") + .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice") +); + +AggregationResults result = mongoTemplate.aggregate(agg, Document.class); +List resultList = result.getMappedResults(); +---- + +Note that we can also refer to other fields of the document within the SpEL expression. + +[[mongo.aggregation.examples.example7]] +===== Aggregation Framework Example 7 + +This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation]. + +[source,java] +---- +public class InventoryItem { + + @Id int id; + String item; + String description; + int qty; +} + +public class InventoryItemProjection { + + @Id int id; + String item; + String description; + int qty; + int discount; +} +---- + +[source,java] +---- +import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; + +TypedAggregation agg = newAggregation(InventoryItem.class, + project("item").and("discount") + .applyCondition(ConditionalOperator.newBuilder().when(Criteria.where("qty").gte(250)) + .then(30) + .otherwise(20)) + .and(ifNull("description", "Unspecified")).as("description") +); + +AggregationResults result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class); +List stateStatsList = result.getMappedResults(); +---- + +This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. 
A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or items that have a `null` description. + +As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression. + +.Conditional aggregation projection +==== +[source,java] +---- +TypedAggregation agg = Aggregation.newAggregation(Book.class, + project("title") + .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1> + .equalToValue("")) <2> + .then("$$REMOVE") <3> + .otherwiseValueOf("author.middle") <4> + ) + .as("author.middle")); +---- +<1> If the value of the field `author.middle` +<2> does not contain a value, +<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field. +<4> Otherwise, add the field value of `author.middle`. +==== diff --git a/src/main/asciidoc/reference/gridfs.adoc b/src/main/asciidoc/reference/gridfs.adoc new file mode 100644 index 0000000000..94caf11f47 --- /dev/null +++ b/src/main/asciidoc/reference/gridfs.adoc @@ -0,0 +1,115 @@ +[[gridfs]] +== GridFS Support + +MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. 
You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows: + +.JavaConfig setup for a GridFsTemplate +==== +[source,java] +---- +class GridFsConfiguration extends AbstractMongoClientConfiguration { + + // … further configuration omitted + + @Bean + public GridFsTemplate gridFsTemplate() { + return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter()); + } +} +---- +==== + +The corresponding XML configuration follows: + +.XML configuration for a GridFsTemplate +==== +[source,xml] +---- + + + + + + + + + + + + +---- +==== + +The template can now be injected and used to perform storage and retrieval operations, as the following example shows: + +.Using GridFsTemplate to store files +==== +[source,java] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void storeFileToGridFs() { + + FileMetadata metadata = new FileMetadata(); + // populate metadata + Resource file = … // lookup File or Resource + + operations.store(file.getInputStream(), "filename.txt", metadata); + } +} +---- +==== + +The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`. + +You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. 
The following example shows how to use `GridFsTemplate` to query for files: + +.Using GridFsTemplate to query for files +==== +[source,java] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void findFilesInGridFs() { + GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt"))); + } +} +---- +==== + +NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded. + +The other option to read files from the GridFs is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files: + +.Using GridFsTemplate to read files +==== +[source,java] +---- +class GridFsClient { + + @Autowired + GridFsOperations operations; + + @Test + public void readFilesFromGridFs() { + GridFsResource[] txtFiles = operations.getResources("*.txt"); + } +} +---- +==== + +`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) be plugged into an `ApplicationContext` to read Spring Config files from a MongoDB database. diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index daaad49963..f214edba4c 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1002,7 +1002,7 @@ assertThat(upserted.getFirstName()).isEqualTo("Mary"); assertThat(upserted.getAge()).isOne(); ---- -= [[mongo-template.aggregation-update]] +[[mongo-template.aggregation-update]] === Aggregation Pipeline Updates Update methods exposed by `MongoOperations` and `ReactiveMongoOperations` also accept an <> via `AggregationUpdate`. 
@@ -2419,662 +2419,7 @@ GroupByResults results = mongoTemplate.group(where("x").gt(0), keyFunction("classpath:keyFunction.js").initialDocument("{ count: 0 }").reduceFunction("classpath:groupReduce.js"), XObject.class); ---- -[[mongo.aggregation]] -== Aggregation Framework Support - -Spring Data MongoDB provides support for the Aggregation Framework introduced to MongoDB in version 2.2. - -For further information, see the full https://docs.mongodb.org/manual/aggregation/[reference documentation] of the aggregation framework and other data aggregation tools for MongoDB. - -[[mongo.aggregation.basic-concepts]] -=== Basic Concepts - -The Aggregation Framework support in Spring Data MongoDB is based on the following key abstractions: `Aggregation`, `AggregationDefinition`, and `AggregationResults`. - -* `Aggregation` -+ -An `Aggregation` represents a MongoDB `aggregate` operation and holds the description of the aggregation pipeline instructions. Aggregations are created by invoking the appropriate `newAggregation(…)` static factory method of the `Aggregation` class, which takes a list of `AggregateOperation` and an optional input class. -+ -The actual aggregate operation is run by the `aggregate` method of the `MongoTemplate`, which takes the desired output class as a parameter. -+ -* `TypedAggregation` -+ -A `TypedAggregation`, just like an `Aggregation`, holds the instructions of the aggregation pipeline and a reference to the input type, that is used for mapping domain properties to actual document fields. -+ -At runtime, field references get checked against the given input type, considering potential `@Field` annotations. -[NOTE] -==== -Changed in 3.2 referencing non-existent properties does no longer raise errors. To restore the previous behaviour use the `strictMapping` option of `AggregationOptions`. 
-==== -* `AggregationDefinition` -+ -An `AggregationDefinition` represents a MongoDB aggregation pipeline operation and describes the processing that should be performed in this aggregation step. Although you could manually create an `AggregationDefinition`, we recommend using the static factory methods provided by the `Aggregate` class to construct an `AggregateOperation`. -+ -* `AggregationResults` -+ -`AggregationResults` is the container for the result of an aggregate operation. It provides access to the raw aggregation result, in the form of a `Document` to the mapped objects and other information about the aggregation. -+ -The following listing shows the canonical example for using the Spring Data MongoDB support for the MongoDB Aggregation Framework: -+ -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -Aggregation agg = newAggregation( - pipelineOP1(), - pipelineOP2(), - pipelineOPn() -); - -AggregationResults results = mongoTemplate.aggregate(agg, "INPUT_COLLECTION_NAME", OutputType.class); -List mappedResult = results.getMappedResults(); ----- - -Note that, if you provide an input class as the first parameter to the `newAggregation` method, the `MongoTemplate` derives the name of the input collection from this class. Otherwise, if you do not not specify an input class, you must provide the name of the input collection explicitly. If both an input class and an input collection are provided, the latter takes precedence. 
- -[[mongo.aggregation.supported-aggregation-operations]] -=== Supported Aggregation Operations - -The MongoDB Aggregation Framework provides the following types of aggregation operations: - -* Pipeline Aggregation Operators -* Group Aggregation Operators -* Boolean Aggregation Operators -* Comparison Aggregation Operators -* Arithmetic Aggregation Operators -* String Aggregation Operators -* Date Aggregation Operators -* Array Aggregation Operators -* Conditional Aggregation Operators -* Lookup Aggregation Operators -* Convert Aggregation Operators -* Object Aggregation Operators -* Script Aggregation Operators - -At the time of this writing, we provide support for the following Aggregation Operations in Spring Data MongoDB: - -.Aggregation Operations currently supported by Spring Data MongoDB -[cols="2*"] -|=== -| Pipeline Aggregation Operators -| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind` - -| Set Aggregation Operators -| `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` - -| Group/Accumulator Aggregation Operators -| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` - -| Arithmetic Aggregation Operators -| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` - -| Document Operators -| `rank`, `denseRank`, `documentNumber`, `shift` - -| String Aggregation Operators -| `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` - -| Comparison Aggregation Operators -| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` - -| Array Aggregation Operators -| `arrayElementAt`, `arrayToObject`, 
`concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` - -| Literal Operators -| `literal` - -| Date Aggregation Operators -| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` - -| Variable Operators -| `map` - -| Conditional Aggregation Operators -| `cond`, `ifNull`, `switch` - -| Type Aggregation Operators -| `type` - -| Convert Aggregation Operators -| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` - -| Object Aggregation Operators -| `objectToArray`, `mergeObjects` - -| Script Aggregation Operators -| `function`, `accumulator` -|=== - -* The operation is mapped or added by Spring Data MongoDB. - -Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB.Comparison aggregation operators are expressed as `Criteria` expressions. - -[[mongo.aggregation.projection]] -=== Projection Expressions - -Projection expressions are used to define the fields that are the outcome of a particular aggregation step. Projection expressions can be defined through the `project` method of the `Aggregation` class, either by passing a list of `String` objects or an aggregation framework `Fields` object. The projection can be extended with additional fields through a fluent API by using the `and(String)` method and aliased by using the `as(String)` method. -Note that you can also define fields with aliases by using the `Fields.field` static factory method of the aggregation framework, which you can then use to construct a new `Fields` instance. References to projected fields in later aggregation stages are valid only for the field names of included fields or their aliases (including newly defined fields and their aliases). 
Fields not included in the projection cannot be referenced in later aggregation stages. The following listings show examples of projection expression: - -.Projection expression examples -==== -[source,java] ----- -// generates {$project: {name: 1, netPrice: 1}} -project("name", "netPrice") - -// generates {$project: {thing1: $thing2}} -project().and("thing1").as("thing2") - -// generates {$project: {a: 1, b: 1, thing2: $thing1}} -project("a","b").and("thing1").as("thing2") ----- -==== - -.Multi-Stage Aggregation using Projection and Sorting -==== -[source,java] ----- -// generates {$project: {name: 1, netPrice: 1}}, {$sort: {name: 1}} -project("name", "netPrice"), sort(ASC, "name") - -// generates {$project: {name: $firstname}}, {$sort: {name: 1}} -project().and("firstname").as("name"), sort(ASC, "name") - -// does not work -project().and("firstname").as("name"), sort(ASC, "firstname") ----- -==== - -More examples for project operations can be found in the `AggregationTests` class. Note that further details regarding the projection expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/project/#pipe._S_project[corresponding section] of the MongoDB Aggregation Framework reference documentation. - -[[mongo.aggregation.facet]] -=== Faceted Classification - -As of Version 3.4, MongoDB supports faceted classification by using the Aggregation Framework. A faceted classification uses semantic categories (either general or subject-specific) that are combined to create the full classification entry. Documents flowing through the aggregation pipeline are classified into buckets. A multi-faceted classification enables various aggregations on the same set of input documents, without needing to retrieve the input documents multiple times. - -==== Buckets - -Bucket operations categorize incoming documents into groups, called buckets, based on a specified expression and bucket boundaries. 
Bucket operations require a grouping field or a grouping expression. You can define them by using the `bucket()` and `bucketAuto()` methods of the `Aggregate` class. `BucketOperation` and `BucketAutoOperation` can expose accumulations based on aggregation expressions for input documents. You can extend the bucket operation with additional parameters through a fluent API by using the `with…()` methods and the `andOutput(String)` method. You can alias the operation by using the `as(String)` method. Each bucket is represented as a document in the output. - -`BucketOperation` takes a defined set of boundaries to group incoming documents into these categories. Boundaries are required to be sorted. The following listing shows some examples of bucket operations: - -.Bucket operation examples -==== -[source,java] ----- -// generates {$bucket: {groupBy: $price, boundaries: [0, 100, 400]}} -bucket("price").withBoundaries(0, 100, 400); - -// generates {$bucket: {groupBy: $price, default: "Other" boundaries: [0, 100]}} -bucket("price").withBoundaries(0, 100).withDefault("Other"); - -// generates {$bucket: {groupBy: $price, boundaries: [0, 100], output: { count: { $sum: 1}}}} -bucket("price").withBoundaries(0, 100).andOutputCount().as("count"); - -// generates {$bucket: {groupBy: $price, boundaries: [0, 100], 5, output: { titles: { $push: "$title"}}} -bucket("price").withBoundaries(0, 100).andOutput("title").push().as("titles"); ----- -==== - -`BucketAutoOperation` determines boundaries in an attempt to evenly distribute documents into a specified number of buckets. `BucketAutoOperation` optionally takes a granularity value that specifies the https://en.wikipedia.org/wiki/Preferred_number[preferred number] series to use to ensure that the calculated boundary edges end on preferred round numbers or on powers of 10. 
The following listing shows examples of bucket operations: - -.Bucket operation examples -==== -[source,java] ----- -// generates {$bucketAuto: {groupBy: $price, buckets: 5}} -bucketAuto("price", 5) - -// generates {$bucketAuto: {groupBy: $price, buckets: 5, granularity: "E24"}} -bucketAuto("price", 5).withGranularity(Granularities.E24).withDefault("Other"); - -// generates {$bucketAuto: {groupBy: $price, buckets: 5, output: { titles: { $push: "$title"}}} -bucketAuto("price", 5).andOutput("title").push().as("titles"); ----- -==== - -To create output fields in buckets, bucket operations can use `AggregationExpression` through `andOutput()` and <> through `andOutputExpression()`. - -Note that further details regarding bucket expressions can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/bucket/[`$bucket` section] and -https://docs.mongodb.org/manual/reference/operator/aggregation/bucketAuto/[`$bucketAuto` section] of the MongoDB Aggregation Framework reference documentation. - -==== Multi-faceted Aggregation - -Multiple aggregation pipelines can be used to create multi-faceted aggregations that characterize data across multiple dimensions (or facets) within a single aggregation stage. Multi-faceted aggregations provide multiple filters and categorizations to guide data browsing and analysis. A common implementation of faceting is how many online retailers provide ways to narrow down search results by applying filters on product price, manufacturer, size, and other factors. - -You can define a `FacetOperation` by using the `facet()` method of the `Aggregation` class. You can customize it with multiple aggregation pipelines by using the `and()` method. Each sub-pipeline has its own field in the output document where its results are stored as an array of documents. - -Sub-pipelines can project and filter input documents prior to grouping. Common use cases include extraction of date parts or calculations before categorization. 
The following listing shows facet operation examples: - -.Facet operation examples -==== -[source,java] ----- -// generates {$facet: {categorizedByPrice: [ { $match: { price: {$exists : true}}}, { $bucketAuto: {groupBy: $price, buckets: 5}}]}} -facet(match(Criteria.where("price").exists(true)), bucketAuto("price", 5)).as("categorizedByPrice")) - -// generates {$facet: {categorizedByCountry: [ { $match: { country: {$exists : true}}}, { $sortByCount: "$country"}]}} -facet(match(Criteria.where("country").exists(true)), sortByCount("country")).as("categorizedByCountry")) - -// generates {$facet: {categorizedByYear: [ -// { $project: { title: 1, publicationYear: { $year: "publicationDate"}}}, -// { $bucketAuto: {groupBy: $price, buckets: 5, output: { titles: {$push:"$title"}}} -// ]}} -facet(project("title").and("publicationDate").extractYear().as("publicationYear"), - bucketAuto("publicationYear", 5).andOutput("title").push().as("titles")) - .as("categorizedByYear")) ----- -==== - -Note that further details regarding facet operation can be found in the https://docs.mongodb.org/manual/reference/operator/aggregation/facet/[`$facet` section] of the MongoDB Aggregation Framework reference documentation. - -[[mongo.aggregation.sort-by-count]] -==== Sort By Count - -Sort by count operations group incoming documents based on the value of a specified expression, compute the count of documents in each distinct group, and sort the results by count. It offers a handy shortcut to apply sorting when using <>. Sort by count operations require a grouping field or grouping expression. 
The following listing shows a sort by count example: - -.Sort by count example -==== -[source,java] ----- -// generates { $sortByCount: "$country" } -sortByCount("country"); ----- -==== - -A sort by count operation is equivalent to the following BSON (Binary JSON): - ----- -{ $group: { _id: , count: { $sum: 1 } } }, -{ $sort: { count: -1 } } ----- - -[[mongo.aggregation.projection.expressions]] -==== Spring Expression Support in Projection Expressions - -We support the use of SpEL expressions in projection expressions through the `andExpression` method of the `ProjectionOperation` and `BucketOperation` classes. This feature lets you define the desired expression as a SpEL expression. On running a query, the SpEL expression is translated into a corresponding MongoDB projection expression part. This arrangement makes it much easier to express complex calculations. - -===== Complex Calculations with SpEL expressions - -Consider the following SpEL expression: - -[source,java] ----- -1 + (q + 1) / (q - 1) ----- - -The preceding expression is translated into the following projection expression part: - -[source,javascript] ----- -{ "$add" : [ 1, { - "$divide" : [ { - "$add":["$q", 1]}, { - "$subtract":[ "$q", 1]} - ] -}]} ----- - -You can see examples in more context in <> and <>. You can find more usage examples for supported SpEL expression constructs in `SpelExpressionTransformerUnitTests`. 
The following table shows the SpEL transformations supported by Spring Data MongoDB: - -.Supported SpEL transformations -[%header,cols="2"] -|=== -| SpEL Expression -| Mongo Expression Part -| a == b -| { $eq : [$a, $b] } -| a != b -| { $ne : [$a , $b] } -| a > b -| { $gt : [$a, $b] } -| a >= b -| { $gte : [$a, $b] } -| a < b -| { $lt : [$a, $b] } -| a <= b -| { $lte : [$a, $b] } -| a + b -| { $add : [$a, $b] } -| a - b -| { $subtract : [$a, $b] } -| a * b -| { $multiply : [$a, $b] } -| a / b -| { $divide : [$a, $b] } -| a^b -| { $pow : [$a, $b] } -| a % b -| { $mod : [$a, $b] } -| a && b -| { $and : [$a, $b] } -| a \|\| b -| { $or : [$a, $b] } -| !a -| { $not : [$a] } -|=== - -In addition to the transformations shown in the preceding table, you can use standard SpEL operations such as `new` to (for example) create arrays and reference expressions through their names (followed by the arguments to use in brackets). The following example shows how to create an array in this fashion: - -[source,java] ----- -// { $setEquals : [$a, [5, 8, 13] ] } -.andExpression("setEquals(a, new int[]{5, 8, 13})"); ----- - -[[mongo.aggregation.examples]] -==== Aggregation Framework Examples - -The examples in this section demonstrate the usage patterns for the MongoDB Aggregation Framework with Spring Data MongoDB. - -[[mongo.aggregation.examples.example1]] -===== Aggregation Framework Example 1 - -In this introductory example, we want to aggregate a list of tags to get the occurrence count of a particular tag from a MongoDB collection (called `tags`) sorted by the occurrence count in descending order. This example demonstrates the usage of grouping, sorting, projections (selection), and unwinding (result splitting). 
- -[source,java] ----- -class TagCount { - String tag; - int n; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -Aggregation agg = newAggregation( - project("tags"), - unwind("tags"), - group("tags").count().as("n"), - project("n").and("tag").previousOperation(), - sort(DESC, "n") -); - -AggregationResults results = mongoTemplate.aggregate(agg, "tags", TagCount.class); -List tagCount = results.getMappedResults(); ----- - -The preceding listing uses the following algorithm: - -. Create a new aggregation by using the `newAggregation` static factory method, to which we pass a list of aggregation operations. These aggregate operations define the aggregation pipeline of our `Aggregation`. -. Use the `project` operation to select the `tags` field (which is an array of strings) from the input collection. -. Use the `unwind` operation to generate a new document for each tag within the `tags` array. -. Use the `group` operation to define a group for each `tags` value for which we aggregate the occurrence count (by using the `count` aggregation operator and collecting the result in a new field called `n`). -. Select the `n` field and create an alias for the ID field generated from the previous group operation (hence the call to `previousOperation()`) with a name of `tag`. -. Use the `sort` operation to sort the resulting list of tags by their occurrence count in descending order. -. Call the `aggregate` method on `MongoTemplate` to let MongoDB perform the actual aggregation operation, with the created `Aggregation` as an argument. - -Note that the input collection is explicitly specified as the `tags` parameter to the `aggregate` Method. If the name of the input collection is not specified explicitly, it is derived from the input class passed as the first parameter to the `newAggreation` method. 
- -[[mongo.aggregation.examples.example2]] -===== Aggregation Framework Example 2 - -This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#largest-and-smallest-cities-by-state[Largest and Smallest Cities by State] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return the smallest and largest cities by population for each state by using the aggregation framework. This example demonstrates grouping, sorting, and projections (selection). - -[source,java] ----- -class ZipInfo { - String id; - String city; - String state; - @Field("pop") int population; - @Field("loc") double[] location; -} - -class City { - String name; - int population; -} - -class ZipInfoStats { - String id; - String state; - City biggestCity; - City smallestCity; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation aggregation = newAggregation(ZipInfo.class, - group("state", "city") - .sum("population").as("pop"), - sort(ASC, "pop", "state", "city"), - group("state") - .last("city").as("biggestCity") - .last("pop").as("biggestPop") - .first("city").as("smallestCity") - .first("pop").as("smallestPop"), - project() - .and("state").previousOperation() - .and("biggestCity") - .nested(bind("name", "biggestCity").and("population", "biggestPop")) - .and("smallestCity") - .nested(bind("name", "smallestCity").and("population", "smallestPop")), - sort(ASC, "state") -); - -AggregationResults result = mongoTemplate.aggregate(aggregation, ZipInfoStats.class); -ZipInfoStats firstZipInfoStats = result.getMappedResults().get(0); ----- - -Note that the `ZipInfo` class maps the structure of the given input-collection. The `ZipInfoStats` class defines the structure in the desired output format. - -The preceding listings use the following algorithm: - -. 
Use the `group` operation to define a group from the input-collection. The grouping criteria is the combination of the `state` and `city` fields, which forms the ID structure of the group. We aggregate the value of the `population` property from the grouped elements by using the `sum` operator and save the result in the `pop` field. -. Use the `sort` operation to sort the intermediate-result by the `pop`, `state` and `city` fields, in ascending order, such that the smallest city is at the top and the biggest city is at the bottom of the result. Note that the sorting on `state` and `city` is implicitly performed against the group ID fields (which Spring Data MongoDB handled). -. Use a `group` operation again to group the intermediate result by `state`. Note that `state` again implicitly references a group ID field. We select the name and the population count of the biggest and smallest city with calls to the `last(…)` and `first(...)` operators, respectively, in the `project` operation. -. Select the `state` field from the previous `group` operation. Note that `state` again implicitly references a group ID field. Because we do not want an implicitly generated ID to appear, we exclude the ID from the previous operation by using `and(previousOperation()).exclude()`. Because we want to populate the nested `City` structures in our output class, we have to emit appropriate sub-documents by using the nested method. -. Sort the resulting list of `StateStats` by their state name in ascending order in the `sort` operation. - -Note that we derive the name of the input collection from the `ZipInfo` class passed as the first parameter to the `newAggregation` method. 
- -[[mongo.aggregation.examples.example3]] -===== Aggregation Framework Example 3 - -This example is based on the https://docs.mongodb.org/manual/tutorial/aggregation-examples/#states-with-populations-over-10-million[States with Populations Over 10 Million] example from the MongoDB Aggregation Framework documentation. We added additional sorting to produce stable results with different MongoDB versions. Here we want to return all states with a population greater than 10 million, using the aggregation framework. This example demonstrates grouping, sorting, and matching (filtering). - -[source,java] ----- -class StateStats { - @Id String id; - String state; - @Field("totalPop") int totalPopulation; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(ZipInfo.class, - group("state").sum("population").as("totalPop"), - sort(ASC, previousOperation(), "totalPop"), - match(where("totalPop").gte(10 * 1000 * 1000)) -); - -AggregationResults result = mongoTemplate.aggregate(agg, StateStats.class); -List stateStatsList = result.getMappedResults(); ----- - -The preceding listings use the following algorithm: - -. Group the input collection by the `state` field and calculate the sum of the `population` field and store the result in the new field `"totalPop"`. -. Sort the intermediate result by the id-reference of the previous group operation in addition to the `"totalPop"` field in ascending order. -. Filter the intermediate result by using a `match` operation which accepts a `Criteria` query as an argument. - -Note that we derive the name of the input collection from the `ZipInfo` class passed as first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example4]] -===== Aggregation Framework Example 4 - -This example demonstrates the use of simple arithmetic operations in the projection operation. 
- -[source,java] ----- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(Product.class, - project("name", "netPrice") - .and("netPrice").plus(1).as("netPricePlus1") - .and("netPrice").minus(1).as("netPriceMinus1") - .and("netPrice").multiply(1.19).as("grossPrice") - .and("netPrice").divide(2).as("netPriceDiv2") - .and("spaceUnits").mod(2).as("spaceUnitsMod2") -); - -AggregationResults result = mongoTemplate.aggregate(agg, Document.class); -List resultList = result.getMappedResults(); ----- - -Note that we derive the name of the input collection from the `Product` class passed as first parameter to the `newAggregation` method. - -[[mongo.aggregation.examples.example5]] -===== Aggregation Framework Example 5 - -This example demonstrates the use of simple arithmetic operations derived from SpEL Expressions in the projection operation. 
- -[source,java] ----- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(Product.class, - project("name", "netPrice") - .andExpression("netPrice + 1").as("netPricePlus1") - .andExpression("netPrice - 1").as("netPriceMinus1") - .andExpression("netPrice / 2").as("netPriceDiv2") - .andExpression("netPrice * 1.19").as("grossPrice") - .andExpression("spaceUnits % 2").as("spaceUnitsMod2") - .andExpression("(netPrice * 0.8 + 1.2) * 1.19").as("grossPriceIncludingDiscountAndCharge") - -); - -AggregationResults result = mongoTemplate.aggregate(agg, Document.class); -List resultList = result.getMappedResults(); ----- - -[[mongo.aggregation.examples.example6]] -===== Aggregation Framework Example 6 - -This example demonstrates the use of complex arithmetic operations derived from SpEL Expressions in the projection operation. - -Note: The additional parameters passed to the `addExpression` method can be referenced with indexer expressions according to their position. In this example, we reference the first parameter of the parameters array with `[0]`. When the SpEL expression is transformed into a MongoDB aggregation framework expression, external parameter expressions are replaced with their respective values. 
- -[source,java] ----- -class Product { - String id; - String name; - double netPrice; - int spaceUnits; -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -double shippingCosts = 1.2; - -TypedAggregation agg = newAggregation(Product.class, - project("name", "netPrice") - .andExpression("(netPrice * (1-discountRate) + [0]) * (1+taxRate)", shippingCosts).as("salesPrice") -); - -AggregationResults result = mongoTemplate.aggregate(agg, Document.class); -List resultList = result.getMappedResults(); ----- - -Note that we can also refer to other fields of the document within the SpEL expression. - -[[mongo.aggregation.examples.example7]] -===== Aggregation Framework Example 7 - -This example uses conditional projection. It is derived from the https://docs.mongodb.com/manual/reference/operator/aggregation/cond/[$cond reference documentation]. - -[source,java] ----- -public class InventoryItem { - - @Id int id; - String item; - String description; - int qty; -} - -public class InventoryItemProjection { - - @Id int id; - String item; - String description; - int qty; - int discount -} ----- - -[source,java] ----- -import static org.springframework.data.mongodb.core.aggregation.Aggregation.*; - -TypedAggregation agg = newAggregation(InventoryItem.class, - project("item").and("discount") - .applyCondition(ConditionalOperator.newBuilder().when(Criteria.where("qty").gte(250)) - .then(30) - .otherwise(20)) - .and(ifNull("description", "Unspecified")).as("description") -); - -AggregationResults result = mongoTemplate.aggregate(agg, "inventory", InventoryItemProjection.class); -List stateStatsList = result.getMappedResults(); ----- - -This one-step aggregation uses a projection operation with the `inventory` collection. We project the `discount` field by using a conditional operation for all inventory items that have a `qty` greater than or equal to `250`. 
A second conditional projection is performed for the `description` field. We apply the `Unspecified` description to all items that either do not have a `description` field or items that have a `null` description. - -As of MongoDB 3.6, it is possible to exclude fields from the projection by using a conditional expression. - -.Conditional aggregation projection -==== -[source,java] ----- -TypedAggregation agg = Aggregation.newAggregation(Book.class, - project("title") - .and(ConditionalOperators.when(ComparisonOperators.valueOf("author.middle") <1> - .equalToValue("")) <2> - .then("$$REMOVE") <3> - .otherwiseValueOf("author.middle") <4> - ) - .as("author.middle")); ----- -<1> If the value of the field `author.middle` -<2> does not contain a value, -<3> then use https://docs.mongodb.com/manual/reference/aggregation-variables/#variable.REMOVE[``$$REMOVE``] to exclude the field. -<4> Otherwise, add the field value of `author.middle`. -==== +include::aggregation-framework.adoc[] [[mongo-template.index-and-collections]] == Index and Collection Management @@ -3267,122 +2612,7 @@ boolean hasIndex = template.execute("geolocation", new CollectionCallbackBoolean }); ---- -[[gridfs]] -== GridFS Support - -MongoDB supports storing binary files inside its filesystem, GridFS. Spring Data MongoDB provides a `GridFsOperations` interface as well as the corresponding implementation, `GridFsTemplate`, to let you interact with the filesystem. 
You can set up a `GridFsTemplate` instance by handing it a `MongoDatabaseFactory` as well as a `MongoConverter`, as the following example shows: - -.JavaConfig setup for a GridFsTemplate -==== -[source,java] ----- -class GridFsConfiguration extends AbstractMongoClientConfiguration { - - // … further configuration omitted - - @Bean - public GridFsTemplate gridFsTemplate() { - return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter()); - } -} ----- -==== - -The corresponding XML configuration follows: - -.XML configuration for a GridFsTemplate -==== -[source,xml] ----- - - - - - - - - - - - - ----- -==== - -The template can now be injected and used to perform storage and retrieval operations, as the following example shows: - -.Using GridFsTemplate to store files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void storeFileToGridFs() { - - FileMetadata metadata = new FileMetadata(); - // populate metadata - Resource file = … // lookup File or Resource - - operations.store(file.getInputStream(), "filename.txt", metadata); - } -} ----- -==== - -The `store(…)` operations take an `InputStream`, a filename, and (optionally) metadata information about the file to store. The metadata can be an arbitrary object, which will be marshaled by the `MongoConverter` configured with the `GridFsTemplate`. Alternatively, you can also provide a `Document`. - -You can read files from the filesystem through either the `find(…)` or the `getResources(…)` methods. Let's have a look at the `find(…)` methods first. You can either find a single file or multiple files that match a `Query`. You can use the `GridFsCriteria` helper class to define queries. It provides static factory methods to encapsulate default metadata fields (such as `whereFilename()` and `whereContentType()`) or a custom one through `whereMetaData()`. 
The following example shows how to use `GridFsTemplate` to query for files: - -.Using GridFsTemplate to query for files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void findFilesInGridFs() { - GridFSFindIterable result = operations.find(query(whereFilename().is("filename.txt"))) - } -} ----- -==== - -NOTE: Currently, MongoDB does not support defining sort criteria when retrieving files from GridFS. For this reason, any sort criteria defined on the `Query` instance handed into the `find(…)` method are disregarded. - -The other option to read files from the GridFs is to use the methods introduced by the `ResourcePatternResolver` interface. They allow handing an Ant path into the method and can thus retrieve files matching the given pattern. The following example shows how to use `GridFsTemplate` to read files: - -.Using GridFsTemplate to read files -==== -[source,java] ----- -class GridFsClient { - - @Autowired - GridFsOperations operations; - - @Test - public void readFilesFromGridFs() { - GridFsResources[] txtFiles = operations.getResources("*.txt"); - } -} ----- -==== - -`GridFsOperations` extends `ResourcePatternResolver` and lets the `GridFsTemplate` (for example) to be plugged into an `ApplicationContext` to read Spring Config files from MongoDB database. - +include::gridfs.adoc[] include::tailable-cursors.adoc[] include::change-streams.adoc[] include::time-series.adoc[] From 75b5a548b61c3ea1fa70bb60cdc69ff6ee2c91c6 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 11:23:42 +0200 Subject: [PATCH 085/983] Polishing. Fix asterisk callouts. 
See #3786 --- .../asciidoc/reference/aggregation-framework.adoc | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index a843af17f4..547b3b1530 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -57,7 +57,7 @@ Note that, if you provide an input class as the first parameter to the `newAggre The MongoDB Aggregation Framework provides the following types of aggregation operations: * Pipeline Aggregation Operators -* Group Aggregation Operators +* Group/Accumulator Aggregation Operators * Boolean Aggregation Operators * Comparison Aggregation Operators * Arithmetic Aggregation Operators @@ -82,19 +82,16 @@ At the time of this writing, we provide support for the following Aggregation Op | `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` | Group/Accumulator Aggregation Operators -| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `(*count)`, `stdDevPop`, `stdDevSamp` +| `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (*via `minus`), `trunc` - -| Document Operators -| `rank`, `denseRank`, `documentNumber`, `shift` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, 
`substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators -| `eq` (via: `is`), `gt`, `gte`, `lt`, `lte`, `ne` +| `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne` | Array Aggregation Operators | `arrayElementAt`, `arrayToObject`, `concatArrays`, `filter`, `in`, `indexOfArray`, `isArray`, `range`, `reverseArray`, `reduce`, `size`, `slice`, `zip` @@ -124,9 +121,9 @@ At the time of this writing, we provide support for the following Aggregation Op | `function`, `accumulator` |=== -* The operation is mapped or added by Spring Data MongoDB. ++++*+++ The operation is mapped or added by Spring Data MongoDB. -Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB.Comparison aggregation operators are expressed as `Criteria` expressions. +Note that the aggregation operations not listed here are currently not supported by Spring Data MongoDB. Comparison aggregation operators are expressed as `Criteria` expressions. [[mongo.aggregation.projection]] === Projection Expressions From 82b33331fcc998cbd29421e9a6f1d59f3a7fd0a2 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Thu, 22 Jul 2021 14:08:06 +0200 Subject: [PATCH 086/983] Add support for `$derivative` aggregation operator. Closes: #3716 Original pull request: #3742. 
--- .../core/aggregation/ArithmeticOperators.java | 55 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../ArithmeticOperatorsUnitTests.java | 7 +++ .../SpelExpressionTransformerUnitTests.java | 5 ++ 4 files changed, 68 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index b27e54d298..fe54f2434e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -26,7 +26,9 @@ import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.StringUtils; /** * Gateway to {@literal Arithmetic} aggregation operations that perform math operations on numbers. @@ -591,6 +593,31 @@ public Round roundToPlace(int place) { return round().place(place); } + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative() { + return derivative(null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? 
Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -1724,4 +1751,32 @@ protected String getMongoMethod() { return "$round"; } } + + public static class Derivative extends AbstractAggregationExpression { + + private Derivative(Object value) { + super(value); + } + + public static Derivative derivativeOf(String fieldReference) { + return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + public static Derivative derivativeOf(AggregationExpression expression) { + return new Derivative(Collections.singletonMap("input", expression)); + } + + public static Derivative derivativeOfValue(Number value) { + return new Derivative(Collections.singletonMap("input", value)); + } + + public Derivative unit(String unit) { + return new Derivative(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$derivative"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 4052b2cbaa..928869e93b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -91,6 +91,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("subtract", arrayArgRef().forOperator("$subtract")); map.put("trunc", singleArgRef().forOperator("$trunc")); map.put("round", arrayArgRef().forOperator("$round")); + map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 940a315239..b4f3cdadb6 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -59,4 +59,11 @@ void roundShouldWithPlaceFromExpression() { .toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(new Document("$round", Arrays.asList("$field", new Document("$first", "$source")))); } + + @Test // GH-3716 + void rendersDerivativeCorrectly() { + + assertThat(valueOf("miles").derivative("hour").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index ee55818018..2653c52f2d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -989,6 +989,11 @@ void rendersShiftWithDefault() { } @Nullable + @Test // GH-3716 + void shouldRenderDerivative() { + assertThat(transform("derivative(miles, 'hour')")).isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? 
null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 10c0203605b421d47764866303113b02dfdc4e3e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 14:29:46 +0200 Subject: [PATCH 087/983] Polishing. Accept window units in addition to plain strings. Document operator. See: #3716 Original pull request: #3742. --- .../core/aggregation/ArithmeticOperators.java | 21 ++++++++++++++++++- .../ArithmeticOperatorsUnitTests.java | 6 ++++-- .../reference/aggregation-framework.adoc | 3 ++- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index fe54f2434e..39579fc7b6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -17,6 +17,7 @@ import java.util.Collections; import java.util.List; +import java.util.Locale; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; @@ -26,6 +27,8 @@ import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnit; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -34,6 +37,7 @@ * Gateway to {@literal Arithmetic} 
aggregation operations that perform math operations on numbers. * * @author Christoph Strobl + * @author Mark Paluch * @since 1.10 */ public class ArithmeticOperators { @@ -600,7 +604,22 @@ public Round roundToPlace(int place) { * @since 3.3 */ public Derivative derivative() { - return derivative(null); + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index b4f3cdadb6..da03bc5c61 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -28,8 +28,9 @@ * Unit tests for {@link Round}. 
* * @author Christoph Strobl + * @author Mark Paluch */ -public class ArithmeticOperatorsUnitTests { +class ArithmeticOperatorsUnitTests { @Test // DATAMONGO-2370 void roundShouldWithoutPlace() { @@ -63,7 +64,8 @@ void roundShouldWithPlaceFromExpression() { @Test // GH-3716 void rendersDerivativeCorrectly() { - assertThat(valueOf("miles").derivative("hour").toDocument(Aggregation.DEFAULT_CONTEXT)) + assertThat( + valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); } } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 547b3b1530..2624e6c27e 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` @@ -119,6 +119,7 @@ At the time of this writing, we provide support for the following Aggregation Op | Script Aggregation Operators | `function`, `accumulator` + |=== +++*+++ The operation is mapped or added by Spring Data MongoDB. 
From 6bd0f758fed10eb92c55bd22da7f5b8b40b387a1 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 10:10:18 +0200 Subject: [PATCH 088/983] Extend support for `$ifNull` to cover multiple conditions. Closes: #3720 Original pull request: #3745. --- .../aggregation/ConditionalOperators.java | 69 +++++++++++++++---- .../ConditionalOperatorsUnitTests.java | 35 ++++++++++ 2 files changed, 90 insertions(+), 14 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java index 1d3890ce89..95a0290ec2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java @@ -17,6 +17,7 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; @@ -282,19 +283,29 @@ public Document toDocument(AggregationOperationContext context) { List list = new ArrayList(); - if (condition instanceof Field) { - list.add(context.getReference((Field) condition).toString()); - } else if (condition instanceof AggregationExpression) { - list.add(((AggregationExpression) condition).toDocument(context)); + if(condition instanceof Collection) { + for(Object val : ((Collection)this.condition)) { + list.add(mapCondition(val, context)); + } } else { - list.add(condition); + list.add(mapCondition(condition, context)); } list.add(resolve(value, context)); - return new Document("$ifNull", list); } + private Object mapCondition(Object condition, AggregationOperationContext context) { + + if (condition instanceof Field) { + 
return context.getReference((Field) condition).toString(); + } else if (condition instanceof AggregationExpression) { + return ((AggregationExpression) condition).toDocument(context); + } else { + return condition; + } + } + private Object resolve(Object value, AggregationOperationContext context) { if (value instanceof Field) { @@ -323,15 +334,34 @@ public interface IfNullBuilder { /** * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} * or empty. - * @return the {@link ThenBuilder} + * @return the {@link ThenBuilder}. */ ThenBuilder ifNull(AggregationExpression expression); } + /** + * @author Christoph Strobl + * @since 3.3 + */ + public interface OrBuilder { + + /** + * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder orIfNull(String fieldReference); + + /** + * @param expression the expression to check for a {@literal null} value, + * @return the {@link ThenBuilder}. + */ + ThenBuilder orIfNull(AggregationExpression expression); + } + /** * @author Mark Paluch */ - public interface ThenBuilder { + public interface ThenBuilder extends OrBuilder { /** * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. 
Can be a @@ -361,9 +391,10 @@ public interface ThenBuilder { */ static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder { - private @Nullable Object condition; + private @Nullable List conditions; private IfNullOperatorBuilder() { + conditions = new ArrayList<>(); } /** @@ -381,7 +412,7 @@ public static IfNullOperatorBuilder newBuilder() { public ThenBuilder ifNull(String fieldReference) { Assert.hasText(fieldReference, "FieldReference name must not be null or empty!"); - this.condition = Fields.field(fieldReference); + this.conditions.add(Fields.field(fieldReference)); return this; } @@ -392,15 +423,25 @@ public ThenBuilder ifNull(String fieldReference) { public ThenBuilder ifNull(AggregationExpression expression) { Assert.notNull(expression, "AggregationExpression name must not be null or empty!"); - this.condition = expression; + this.conditions.add(expression); return this; } + @Override + public ThenBuilder orIfNull(String fieldReference) { + return ifNull(fieldReference); + } + + @Override + public ThenBuilder orIfNull(AggregationExpression expression) { + return ifNull(expression); + } + /* (non-Javadoc) * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object) */ public IfNull then(Object value) { - return new IfNull(condition, value); + return new IfNull(conditions, value); } /* (non-Javadoc) @@ -409,7 +450,7 @@ public IfNull then(Object value) { public IfNull thenValueOf(String fieldReference) { Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IfNull(condition, Fields.field(fieldReference)); + return new IfNull(conditions, Fields.field(fieldReference)); } /* (non-Javadoc) @@ -418,7 +459,7 @@ public IfNull thenValueOf(String fieldReference) { public IfNull thenValueOf(AggregationExpression expression) { Assert.notNull(expression, "Expression must not be null!"); - return new IfNull(condition, expression); + return new IfNull(conditions, 
expression); } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java new file mode 100644 index 0000000000..132600cbb5 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.core.aggregation.ConditionalOperators.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +/** + * @author Christoph Strobl + */ +public class ConditionalOperatorsUnitTests { + + @Test // GH-3720 + void rendersIfNullWithMultipleConditionalValuesCorrectly() { + + assertThat(ifNull("description").orIfNull("quantity").then("Unspecified").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $ifNull: [ \"$description\", \"$quantity\", \"Unspecified\" ] }")); + } +} From fd0a402c99e402c6ef69538f34be515c12dc54d6 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 14:34:05 +0200 Subject: [PATCH 089/983] Polishing. See #3720 Original pull request: #3745. 
--- .../aggregation/ConditionalOperators.java | 37 ++++++++++--------- .../ConditionalOperatorsUnitTests.java | 4 +- 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java index 95a0290ec2..1979ec78f4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java @@ -236,7 +236,7 @@ private boolean usesCriteriaDefinition() { * * @author Mark Paluch * @see https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/">https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ */ public static class IfNull implements AggregationExpression { @@ -252,7 +252,8 @@ private IfNull(Object condition, Object value) { /** * Creates new {@link IfNull}. * - * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}. + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. * @return never {@literal null}. */ public static ThenBuilder ifNull(String fieldReference) { @@ -265,7 +266,7 @@ public static ThenBuilder ifNull(String fieldReference) { * Creates new {@link IfNull}. * * @param expression the expression to check for a {@literal null} value, field reference must not be - * {@literal null}. + * {@literal null}. * @return never {@literal null}. 
*/ public static ThenBuilder ifNull(AggregationExpression expression) { @@ -283,8 +284,8 @@ public Document toDocument(AggregationOperationContext context) { List list = new ArrayList(); - if(condition instanceof Collection) { - for(Object val : ((Collection)this.condition)) { + if (condition instanceof Collection) { + for (Object val : ((Collection) this.condition)) { list.add(mapCondition(val, context)); } } else { @@ -326,14 +327,14 @@ public interface IfNullBuilder { /** * @param fieldReference the field to check for a {@literal null} value, field reference must not be - * {@literal null}. + * {@literal null}. * @return the {@link ThenBuilder} */ ThenBuilder ifNull(String fieldReference); /** * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} - * or empty. + * or empty. * @return the {@link ThenBuilder}. */ ThenBuilder ifNull(AggregationExpression expression); @@ -346,7 +347,8 @@ public interface IfNullBuilder { public interface OrBuilder { /** - * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null}. + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. * @return the {@link ThenBuilder} */ ThenBuilder orIfNull(String fieldReference); @@ -365,8 +367,8 @@ public interface ThenBuilder extends OrBuilder { /** * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a - * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB - * representation but must not be {@literal null}. + * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB + * representation but must not be {@literal null}. * @return new instance of {@link IfNull}. */ IfNull then(Object value); @@ -499,7 +501,7 @@ public static Switch switchCases(CaseOperator... 
conditions) { public static Switch switchCases(List conditions) { Assert.notNull(conditions, "Conditions must not be null!"); - return new Switch(Collections.singletonMap("branches", new ArrayList(conditions))); + return new Switch(Collections. singletonMap("branches", new ArrayList(conditions))); } /** @@ -586,7 +588,7 @@ public interface ThenBuilder { * @author Mark Paluch * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/cond/">https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ */ public static class Cond implements AggregationExpression { @@ -847,8 +849,8 @@ public interface ThenBuilder { /** * @param value the value to be used if the condition evaluates {@literal true}. Can be a {@link Document}, a - * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but - * must not be {@literal null}. + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. * @return the {@link OtherwiseBuilder} */ OtherwiseBuilder then(Object value); @@ -873,8 +875,8 @@ public interface OtherwiseBuilder { /** * @param value the value to be used if the condition evaluates {@literal false}. Can be a {@link Document}, a - * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but - * must not be {@literal null}. + * value that is supported by MongoDB or a value that can be converted to a MongoDB representation but + * must not be {@literal null}. 
* @return the {@link Cond} */ Cond otherwise(Object value); @@ -902,8 +904,7 @@ static class ConditionalExpressionBuilder implements WhenBuilder, ThenBuilder, O private @Nullable Object condition; private @Nullable Object thenValue; - private ConditionalExpressionBuilder() { - } + private ConditionalExpressionBuilder() {} /** * Creates a new builder for {@link Cond}. diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java index 132600cbb5..3b88781616 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperatorsUnitTests.java @@ -22,9 +22,11 @@ import org.junit.jupiter.api.Test; /** + * Unit tests for {@link ConditionalOperators}. + * * @author Christoph Strobl */ -public class ConditionalOperatorsUnitTests { +class ConditionalOperatorsUnitTests { @Test // GH-3720 void rendersIfNullWithMultipleConditionalValuesCorrectly() { From df2b2a2f685b46939126c5aa9998d34a8e5270d6 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 23 Jul 2021 10:46:27 +0200 Subject: [PATCH 090/983] Add support for `$integral` aggregation operator. Closes: #3721 Original pull request: #3746. 
--- .../core/aggregation/ArithmeticOperators.java | 87 +++++++++++++++++-- .../core/spel/MethodReferenceNode.java | 1 + .../ArithmeticOperatorsUnitTests.java | 10 +++ .../SpelExpressionTransformerUnitTests.java | 10 +++ 4 files changed, 100 insertions(+), 8 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 39579fc7b6..4d86bac98e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -216,6 +216,27 @@ public Floor floor() { return usesFieldRef() ? Floor.floorValueOf(fieldReference) : Floor.floorValueOf(expression); } + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral() { + return usesFieldRef() ? Integral.integralOf(fieldReference) : Integral.integralOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral(String unit) { + return integral().unit(unit); + } + /** * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the assoicated * number. @@ -520,8 +541,8 @@ public StdDevSamp stdDevSamp() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the population covariance of the two. 
+ * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -532,8 +553,8 @@ public CovariancePop covariancePop(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the population covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. * * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -548,8 +569,8 @@ private CovariancePop covariancePop() { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the given - * field to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -560,8 +581,8 @@ public CovarianceSamp covarianceSamp(String fieldReference) { } /** - * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the given - * {@link AggregationExpression expression} to calculate the sample covariance of the two. + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. 
* * @param expression must not be {@literal null}. * @return new instance of {@link CovariancePop}. @@ -1798,4 +1819,54 @@ protected String getMongoMethod() { return "$derivative"; } } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the approximation for the + * mathematical integral value. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Integral extends AbstractAggregationExpression { + + private Integral(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Integral} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(String fieldReference) { + return new Integral(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Integral} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(AggregationExpression expression) { + return new Integral(Collections.singletonMap("input", expression)); + } + + /** + * Set the unit of measure. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. 
+ */ + public Integral unit(String unit) { + return new Integral(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$integral"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 928869e93b..9be1368caf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -92,6 +92,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("trunc", singleArgRef().forOperator("$trunc")); map.put("round", arrayArgRef().forOperator("$round")); map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); + map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index da03bc5c61..1aab826a23 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -68,4 +68,14 @@ void rendersDerivativeCorrectly() { valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); } + + @Test // GH-3721 + void rendersIntegral() { + 
assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\" } }")); + } + + @Test // GH-3721 + void rendersIntegralWithUnit() { + assertThat(valueOf("kilowatts").integral("hour").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 2653c52f2d..a0fad05a3c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -994,6 +994,16 @@ void shouldRenderDerivative() { assertThat(transform("derivative(miles, 'hour')")).isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); } + @Test // GH-3721 + public void shouldRenderIntegral() { + assertThat(transform("integral(field)")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\" }}")); + } + + @Test // GH-3721 + public void shouldIntegralWithUnit() { + assertThat(transform("integral(field, 'hour')")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); + } + private Object transform(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 2a3a4cf030eefdcdd491cd68c532cfffff86bd20 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 14:59:58 +0200 Subject: [PATCH 091/983] Polishing. 
Fix method order from earlier merges. Add missing Javadoc. Simplify tests. Update documentation. See #3721 Original pull request: #3746. --- .../core/aggregation/ArithmeticOperators.java | 118 ++++++++++++------ .../ArithmeticOperatorsUnitTests.java | 11 +- .../SpelExpressionTransformerUnitTests.java | 51 ++++---- .../reference/aggregation-framework.adoc | 2 +- 4 files changed, 109 insertions(+), 73 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 4d86bac98e..159c6bbeae 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -155,6 +155,46 @@ public Ceil ceil() { return usesFieldRef() ? Ceil.ceilValueOf(fieldReference) : Ceil.ceilValueOf(expression); } + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative() { + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. 
+ * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; + } + /** * Creates new {@link AggregationExpression} that ivides the associated number by number referenced via * {@literal fieldReference}. @@ -226,6 +266,21 @@ public Integral integral() { return usesFieldRef() ? Integral.integralOf(fieldReference) : Integral.integralOf(expression); } + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Integral integral(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return integral(unit.name().toLowerCase(Locale.ROOT)); + } + /** * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. * @@ -234,6 +289,9 @@ public Integral integral() { * @since 3.3 */ public Integral integral(String unit) { + + Assert.hasText(unit, "Unit must not be empty!"); + return integral().unit(unit); } @@ -618,46 +676,6 @@ public Round roundToPlace(int place) { return round().place(place); } - /** - * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. - * - * @return new instance of {@link Derivative}. 
- * @since 3.3 - */ - public Derivative derivative() { - return derivative((String) null); - } - - /** - * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. - * - * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, - * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. - * @return new instance of {@link Derivative}. - * @since 3.3 - */ - public Derivative derivative(WindowUnit unit) { - - Assert.notNull(unit, "Window unit must not be null"); - - return derivative(unit.name().toLowerCase(Locale.ROOT)); - } - - /** - * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. - * - * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be - * {@literal null}. - * @return new instance of {@link Derivative}. - * @since 3.3 - */ - public Derivative derivative(@Nullable String unit) { - - Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) - : Derivative.derivativeOf(expression); - return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; - } - private boolean usesFieldRef() { return fieldReference != null; } @@ -1792,16 +1810,36 @@ protected String getMongoMethod() { } } + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the average rate of change + * within the specified window. + * + * @author Christoph Strobl + * @since 3.3 + */ public static class Derivative extends AbstractAggregationExpression { private Derivative(Object value) { super(value); } + /** + * Create a new instance of {@link Derivative} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Derivative}. 
+ */ public static Derivative derivativeOf(String fieldReference) { return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); } + /** + * Create a new instance of {@link Derivative} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Derivative}. + */ public static Derivative derivativeOf(AggregationExpression expression) { return new Derivative(Collections.singletonMap("input", expression)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 1aab826a23..d57363d91c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; import static org.springframework.data.mongodb.core.aggregation.ArithmeticOperators.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; import java.util.Collections; @@ -66,16 +66,19 @@ void rendersDerivativeCorrectly() { assertThat( valueOf("miles").derivative(SetWindowFieldsOperation.WindowUnits.HOUR).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $derivative: { input: \"$miles\", unit: \"hour\" } }")); + .isEqualTo("{ $derivative: { input: \"$miles\", unit: \"hour\" } }"); } @Test // GH-3721 void rendersIntegral() { - assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\" } }")); + 
assertThat(valueOf("kilowatts").integral().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\" } }"); } @Test // GH-3721 void rendersIntegralWithUnit() { - assertThat(valueOf("kilowatts").integral("hour").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }")); + assertThat(valueOf("kilowatts").integral(SetWindowFieldsOperation.WindowUnits.HOUR) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $integral : { input : \"$kilowatts\", unit : \"hour\" } }"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index a0fad05a3c..0450e556c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -23,8 +23,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.Person; -import org.springframework.lang.Nullable; /** * Unit tests for {@link SpelExpressionTransformer}. 
@@ -152,8 +152,8 @@ void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { @Test // DATAMONGO-774 void shouldRenderConsecutiveOperationsInComplexExpression() { - assertThat(transform("1 + 1 + (1 + 1 + 1) / q")).isEqualTo( - Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); + assertThat(transform("1 + 1 + (1 + 1 + 1) / q")) + .isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); } @Test // DATAMONGO-774 @@ -189,8 +189,7 @@ void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); - assertThat(transform("[0].age + a.c", person)) - .isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); + assertThat(transform("[0].age + a.c", person)).isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); } @Test // DATAMONGO-840 @@ -216,8 +215,7 @@ void shouldRenderMethodReferenceNodeNot() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEquals() { - assertThat(transform("setEquals(a, b)")) - .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("setEquals(a, b)")).isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 @@ -379,8 +377,7 @@ void shouldRenderMethodReferenceTrunc() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeConcat() { - assertThat(transform("concat(a, b, 'c')")) - .isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); + assertThat(transform("concat(a, b, 'c')")).isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); } @Test // DATAMONGO-1530 @@ -400,8 +397,7 @@ void shouldRenderMethodReferenceToUpper() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStrCaseCmp() { - assertThat(transform("strcasecmp(a, b)")) - .isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("strcasecmp(a, 
b)")).isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); } @Test // DATAMONGO-1530 @@ -411,8 +407,7 @@ void shouldRenderMethodReferenceMeta() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeArrayElemAt() { - assertThat(transform("arrayElemAt(a, 10)")) - .isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); + assertThat(transform("arrayElemAt(a, 10)")).isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); } @Test // DATAMONGO-1530 @@ -511,15 +506,14 @@ void shouldRenderMethodReferenceMillisecond() { @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDateToString() { - assertThat(transform("dateToString('%Y-%m-%d', $date)")).isEqualTo( - Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); + assertThat(transform("dateToString('%Y-%m-%d', $date)")) + .isEqualTo(Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCond() { assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( - Document - .parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); + Document.parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); } @Test // DATAMONGO-1530 @@ -633,8 +627,7 @@ void shouldRenderOperationNodeAnd() { @Test // DATAMONGO-1530 void shouldRenderComplexOperationNodeAnd() { assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( - Document - .parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + Document.parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); } @Test // DATAMONGO-1530 @@ -644,8 +637,7 @@ void shouldRenderNotCorrectly() { @Test // DATAMONGO-1530 void shouldRenderComplexNotCorrectly() { - assertThat(transform("!(foo > 10)")) - .isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : 
[ \"$foo\" , 10]}]}")); + assertThat(transform("!(foo > 10)")).isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); } @Test // DATAMONGO-1548 @@ -951,12 +943,14 @@ void shouldRenderRoundWithPlace() { @Test // GH-3712 void shouldRenderCovariancePop() { - assertThat(transform("covariancePop(field1, field2)")).isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); + assertThat(transform("covariancePop(field1, field2)")) + .isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); } @Test // GH-3712 void shouldRenderCovarianceSamp() { - assertThat(transform("covarianceSamp(field1, field2)")).isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); + assertThat(transform("covarianceSamp(field1, field2)")) + .isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); } @Test // GH-3715 @@ -988,20 +982,21 @@ void rendersShiftWithDefault() { .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); } - @Nullable @Test // GH-3716 void shouldRenderDerivative() { - assertThat(transform("derivative(miles, 'hour')")).isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); + assertThat(transform("derivative(miles, 'hour')")) + .isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); } @Test // GH-3721 - public void shouldRenderIntegral() { + void shouldRenderIntegral() { assertThat(transform("integral(field)")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\" }}")); } @Test // GH-3721 - public void shouldIntegralWithUnit() { - assertThat(transform("integral(field, 'hour')")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); + void shouldRenderIntegralWithUnit() { + assertThat(transform("integral(field, 'hour')")) + .isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", 
\"unit\" : \"hour\" }}")); } private Object transform(String expression, Object... params) { diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 2624e6c27e..9b00811a7b 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` From ec16b873b7f52477812c92b7504ec8a5306ede2c Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 28 Jul 2021 08:03:38 +0200 Subject: [PATCH 092/983] Add support for `$degreesToRadians` aggregation operator. Closes: #3714 Original pull request: #3755. 
--- .../core/aggregation/ConvertOperators.java | 58 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../ConvertOperatorsUnitTests.java | 7 +++ .../SpelExpressionTransformerUnitTests.java | 5 ++ 4 files changed, 71 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java index 315a463e1f..b34933444a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -231,6 +231,16 @@ public ToString convertToString() { return ToString.toString(valueObject()); } + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians.\ + * + * @return new instance of {@link DegreesToRadians}. + * @since 3.3 + */ + public DegreesToRadians convertDegreesToRadians() { + return DegreesToRadians.degreesToRadians(valueObject()); + } + private Convert createConvert() { return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression); } @@ -692,4 +702,52 @@ protected String getMongoMethod() { return "$toString"; } } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DegreesToRadians extends AbstractAggregationExpression { + + private DegreesToRadians(Object value) { + super(value); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. 
+ */ + public static DegreesToRadians degreesToRadiansOf(String fieldName) { + return degreesToRadians(Fields.field(fieldName)); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) { + return degreesToRadians(expression); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians. + * + * @param value must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadians(Object value) { + return new DegreesToRadians(value); + } + + @Override + protected String getMongoMethod() { + return "$degreesToRadians"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 9be1368caf..f0799a1af6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -199,6 +199,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("toLong", singleArgRef().forOperator("$toLong")); map.put("toObjectId", singleArgRef().forOperator("$toObjectId")); map.put("toString", singleArgRef().forOperator("$toString")); + map.put("degreesToRadians", singleArgRef().forOperator("$degreesToRadians")); FUNCTIONS = Collections.unmodifiableMap(map); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java index a44c932723..c794cf8102 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ConvertOperatorsUnitTests.java @@ -222,4 +222,11 @@ public void toStringUsingExpression() { assertThat(ConvertOperators.valueOf(EXPRESSION).convertToString().toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $toString: " + EXPRESSION_STRING + " } ")); } + + @Test // GH-3714 + void degreesToRadiansUsingFieldReference() { + + assertThat(ConvertOperators.valueOf("angle_a").convertDegreesToRadians().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $degreesToRadians : \"$angle_a\"}")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 0450e556c4..9cad6cbb15 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -941,6 +941,11 @@ void shouldRenderRoundWithPlace() { assertThat(transform("round(field, 2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); } + @Test // GH-3714 + void shouldRenderDegreesToRadians() { + assertThat(transform("degreesToRadians(angle_a)")).isEqualTo(Document.parse("{ \"$degreesToRadians\" : \"$angle_a\"}")); + } + @Test // GH-3712 void shouldRenderCovariancePop() { assertThat(transform("covariancePop(field1, field2)")) From 0db47169cffca0ca3af462c6f61d9178538cfad9 Mon Sep 17 00:00:00 2001 From: 
Christoph Strobl Date: Wed, 28 Jul 2021 09:40:52 +0200 Subject: [PATCH 093/983] Add support for `$sin` and `$sinh` aggregation operators. Closes: #3728 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 246 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 28 ++ .../SpelExpressionTransformerUnitTests.java | 10 + 4 files changed, 286 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 159c6bbeae..db328338e3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -31,6 +31,7 @@ import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -676,6 +677,48 @@ public Round roundToPlace(int place) { return round().place(place); } + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin() { + return sin(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given {@link AngularDimension unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin(AngularDimension unit) { + return usesFieldRef() ? 
Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sinh sinh() { + return sinh(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sinh sinh(AngularDimension unit) { + return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -1907,4 +1950,207 @@ protected String getMongoMethod() { return "$integral"; } } + + /** + * The unit of measure for computations that operate upon angles. + * + * @author Christoph Strobl + * @since 3.3 + */ + public enum AngularDimension { + RADIANS, DEGREES + } + + /** + * An {@link AggregationExpression expression} that calculates the sine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sin extends AbstractAggregationExpression { + + private Sin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularDimension#RADIANS radians}. + *

        + * Use {@code sinOf("angle", DEGREES)} as shortcut for

        { $sin : { $degreesToRadians : "$angle" } }
        . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference) { + return sinOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference, AngularDimension unit) { + return sin(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression) { + return sinOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression, AngularDimension unit) { + return sin(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) 
that resolves to a + * numeric value + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value) { + return sin(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sin(value); + } + + @Override + protected String getMongoMethod() { + return "$sin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sinh extends AbstractAggregationExpression { + + private Sinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference) { + return sinhOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularDimension unit}. + *

        + * Use {@code sinhOf("angle", DEGREES)} as shortcut for

        { $sinh : { $degreesToRadians : "$angle" } }
        . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference, AngularDimension unit) { + return sinh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + *

        + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression) { + return sinhOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression, AngularDimension unit) { + return sinh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinh(Object value) { + return sinh(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. 
+ */ + public static Sinh sinh(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sinh(value); + } + + @Override + protected String getMongoMethod() { + return "$sinh"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index f0799a1af6..9ee12be1eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -93,6 +93,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("round", arrayArgRef().forOperator("$round")); map.put("derivative", mapArgRef().forOperator("$derivative").mappingParametersTo("input", "unit")); map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); + map.put("sin", singleArgRef().forOperator("$sin")); + map.put("sinh", singleArgRef().forOperator("$sinh")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index d57363d91c..cc32a94323 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -81,4 +81,32 @@ void rendersIntegralWithUnit() { .toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $integral : { input : \"$kilowatts\", 
unit : \"hour\" } }"); } + + @Test // GH-3728 + void rendersSin() { + + assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sin : \"$angle\" }")); + } + + @Test // GH-3728 + void rendersSinWithValueInDegrees() { + + assertThat(valueOf("angle").sin(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sin : { $degreesToRadians : \"$angle\" } }")); + } + + @Test // GH-3728 + void rendersSinh() { + + assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sinh : \"$angle\" }")); + } + + @Test // GH-3728 + void rendersSinhWithValueInDegrees() { + + assertThat(valueOf("angle").sinh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $sinh : { $degreesToRadians : \"$angle\" } }")); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 9cad6cbb15..e250241558 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1004,6 +1004,16 @@ void shouldRenderIntegralWithUnit() { .isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); } + @Test // GH-3728 + void shouldRenderSin() { + assertThat(transform("sin(angle)")).isEqualTo(Document.parse("{ \"$sin\" : \"$angle\"}")); + } + + @Test // GH-3728 + void shouldRenderSinh() { + assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); + } + private Object transform(String expression, Object... 
params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 73d5886aae13082d24b7f49db91da322da509952 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 28 Jul 2021 09:54:05 +0200 Subject: [PATCH 094/983] Add support for `$tan` and `$tanh` aggregation operators. Closes: #3730 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 251 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 29 ++ .../SpelExpressionTransformerUnitTests.java | 10 + 4 files changed, 292 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index db328338e3..e26e41f651 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -719,6 +719,51 @@ public Sinh sinh(AngularDimension unit) { return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tan tan() { + return tan(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given + * {@link AngularDimension unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tan tan(AngularDimension unit) { + return usesFieldRef() ? 
Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tanh tanh() { + return tanh(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Tanh tanh(AngularDimension unit) { + return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -2153,4 +2198,210 @@ protected String getMongoMethod() { return "$sinh"; } } + + /** + * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tan extends AbstractAggregationExpression { + + private Tan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularDimension#RADIANS radians}. + *

        + * Use {@code tanOf("angle", DEGREES)} as shortcut for + * + *

        +		 * { $tan : { $degreesToRadians : "$angle" } }
        +		 * 
        + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference) { + return tanOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference, AngularDimension unit) { + return tan(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression) { + return tanOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression, AngularDimension unit) { + return tan(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) 
that resolves to a + * numeric value + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value) { + return tan(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tan(value); + } + + @Override + protected String getMongoMethod() { + return "$tan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tanh extends AbstractAggregationExpression { + + private Tanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(String fieldReference) { + return tanhOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularDimension unit}. + *

        + * Use {@code tanhOf("angle", DEGREES)} as shortcut for + * + *

        +		 * { $tanh : { $degreesToRadians : "$angle" } }
        +		 * 
        + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(String fieldReference, AngularDimension unit) { + return tanh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + *

        + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression) { + return tanhOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression, AngularDimension unit) { + return tanh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanh(Object value) { + return tanh(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanh(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tanh(value); + } + + @Override + protected String getMongoMethod() { + return "$tanh"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 9ee12be1eb..a2d011d6ad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -95,6 +95,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); map.put("sin", singleArgRef().forOperator("$sin")); map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("tan", singleArgRef().forOperator("$tan")); + map.put("tanh", singleArgRef().forOperator("$tanh")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index cc32a94323..9a77d093c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -109,4 +109,33 @@ void rendersSinhWithValueInDegrees() { assertThat(valueOf("angle").sinh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $sinh : 
{ $degreesToRadians : \"$angle\" } }")); } + + @Test // GH-3730 + void rendersTan() { + + assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tan : \"$angle\" }")); + } + + @Test // GH-3730 + void rendersTanWithValueInDegrees() { + + assertThat(valueOf("angle").tan(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tan : { $degreesToRadians : \"$angle\" } }")); + } + + @Test // GH-3730 + void rendersTanh() { + + assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tanh : \"$angle\" }")); + } + + @Test // GH-3730 + void rendersTanhWithValueInDegrees() { + + assertThat(valueOf("angle").tanh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $tanh : { $degreesToRadians : \"$angle\" } }")); + } + } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index e250241558..cc59a91700 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1014,6 +1014,16 @@ void shouldRenderSinh() { assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); } + @Test // GH-3730 + void shouldRenderTan() { + assertThat(transform("tan(angle)")).isEqualTo(Document.parse("{ \"$tan\" : \"$angle\"}")); + } + + @Test // GH-3730 + void shouldRenderTanh() { + assertThat(transform("tanh(angle)")).isEqualTo(Document.parse("{ \"$tanh\" : \"$angle\"}")); + } + private Object transform(String expression, Object... 
params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From c4c6267d91b89fd299f47a1a8604d86e9e87e53d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 28 Jul 2021 10:04:29 +0200 Subject: [PATCH 095/983] Add support for `$cos` and `$cosh` aggregation operators. Closes: #3710 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 251 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 28 ++ .../SpelExpressionTransformerUnitTests.java | 10 + 4 files changed, 291 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index e26e41f651..4de258b4eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -719,6 +719,51 @@ public Sinh sinh(AngularDimension unit) { return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cos cos() { + return cos(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given + * {@link AngularDimension unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cos cos(AngularDimension unit) { + return usesFieldRef() ? 
Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in + * {@link AngularDimension#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cosh cosh() { + return cosh(AngularDimension.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Cosh cosh(AngularDimension unit) { + return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); + } + /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in * {@link AngularDimension#RADIANS radians}. @@ -2199,6 +2244,212 @@ protected String getMongoMethod() { } } + /** + * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cos extends AbstractAggregationExpression { + + private Cos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularDimension#RADIANS radians}. + *

        + * Use {@code cosOf("angle", DEGREES)} as shortcut for + * + *

        +		 * { $cos : { $degreesToRadians : "$angle" } }
        +		 * 
        + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference) { + return cosOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference, AngularDimension unit) { + return cos(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression) { + return cosOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression, AngularDimension unit) { + return cos(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) 
that resolves to a + * numeric value + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value) { + return cos(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cos(value); + } + + @Override + protected String getMongoMethod() { + return "$cos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cosh extends AbstractAggregationExpression { + + private Cosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference) { + return coshOf(fieldReference, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularDimension unit}. + *

        + * Use {@code coshOf("angle", DEGREES)} as shortcut for + * + *

        +		 * { $cosh : { $degreesToRadians : "$angle" } }
        +		 * 
        + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference, AngularDimension unit) { + return cosh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + *

        + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression) { + return coshOf(expression, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression, AngularDimension unit) { + return cosh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularDimension#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value) { + return cosh(value, AngularDimension.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularDimension unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. 
+ */ + public static Cosh cosh(Object value, AngularDimension unit) { + + if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cosh(value); + } + + @Override + protected String getMongoMethod() { + return "$cosh"; + } + } + /** * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index a2d011d6ad..1efe94c757 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -95,6 +95,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); map.put("sin", singleArgRef().forOperator("$sin")); map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("cos", singleArgRef().forOperator("$cos")); + map.put("cosh", singleArgRef().forOperator("$cosh")); map.put("tan", singleArgRef().forOperator("$tan")); map.put("tanh", singleArgRef().forOperator("$tanh")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 9a77d093c4..55d1647568 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -110,6 +110,34 @@ void 
rendersSinhWithValueInDegrees() { .isEqualTo(Document.parse("{ $sinh : { $degreesToRadians : \"$angle\" } }")); } + @Test // GH-3710 + void rendersCos() { + + assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cos : \"$angle\" }")); + } + + @Test // GH-3710 + void rendersCosWithValueInDegrees() { + + assertThat(valueOf("angle").cos(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cos : { $degreesToRadians : \"$angle\" } }")); + } + + @Test // GH-3710 + void rendersCosh() { + + assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cosh : \"$angle\" }")); + } + + @Test // GH-3710 + void rendersCoshWithValueInDegrees() { + + assertThat(valueOf("angle").cosh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $cosh : { $degreesToRadians : \"$angle\" } }")); + } + @Test // GH-3730 void rendersTan() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index cc59a91700..e30f7f9fb9 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1014,6 +1014,16 @@ void shouldRenderSinh() { assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); } + @Test // GH-3710 + void shouldRenderCos() { + assertThat(transform("cos(angle)")).isEqualTo(Document.parse("{ \"$cos\" : \"$angle\"}")); + } + + @Test // GH-3710 + void shouldRenderCosh() { + assertThat(transform("cosh(angle)")).isEqualTo(Document.parse("{ \"$cosh\" : 
\"$angle\"}")); + } + @Test // GH-3730 void shouldRenderTan() { assertThat(transform("tan(angle)")).isEqualTo(Document.parse("{ \"$tan\" : \"$angle\"}")); From df0372eee1368a5b0c03de1ad3bda9e1aaecf9e7 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 24 Aug 2021 16:11:53 +0200 Subject: [PATCH 096/983] Polishing. Rename AngularDimension to AngularUnit. Tweak Javadoc. Simplify tests. Update reference docs. See: #3710, #3714, #3728, #3730 Original pull request: #3755. --- .../core/aggregation/ArithmeticOperators.java | 244 ++++++----- .../core/aggregation/ConvertOperators.java | 3 +- .../ArithmeticOperatorsUnitTests.java | 36 +- .../SpelExpressionTransformerUnitTests.java | 402 +++++++++--------- .../reference/aggregation-framework.adoc | 4 +- 5 files changed, 355 insertions(+), 334 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 4de258b4eb..7896486abf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -678,34 +678,37 @@ public Round roundToPlace(int place) { } /** - * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Sin sin() { - return sin(AngularDimension.RADIANS); + return sin(AngularUnit.RADIANS); } /** - * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given {@link AngularDimension unit}. 
+ * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given + * {@link AngularUnit unit}. * * @param unit the unit of measure. * @return new instance of {@link Sin}. * @since 3.3 */ - public Sin sin(AngularDimension unit) { + public Sin sin(AngularUnit unit) { return usesFieldRef() ? Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); } /** - * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in {@link AngularDimension#RADIANS radians}. + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Sinh sinh() { - return sinh(AngularDimension.RADIANS); + return sinh(AngularUnit.RADIANS); } /** @@ -715,42 +718,42 @@ public Sinh sinh() { * @return new instance of {@link Sin}. * @since 3.3 */ - public Sinh sinh(AngularDimension unit) { + public Sinh sinh(AngularUnit unit) { return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Cos cos() { - return cos(AngularDimension.RADIANS); + return cos(AngularUnit.RADIANS); } /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param unit the unit of measure. * @return new instance of {@link Sin}. * @since 3.3 */ - public Cos cos(AngularDimension unit) { + public Cos cos(AngularUnit unit) { return usesFieldRef() ? 
Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Cosh cosh() { - return cosh(AngularDimension.RADIANS); + return cosh(AngularUnit.RADIANS); } /** @@ -760,42 +763,42 @@ public Cosh cosh() { * @return new instance of {@link Sin}. * @since 3.3 */ - public Cosh cosh(AngularDimension unit) { + public Cosh cosh(AngularUnit unit) { return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Tan tan() { - return tan(AngularDimension.RADIANS); + return tan(AngularUnit.RADIANS); } /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param unit the unit of measure. * @return new instance of {@link Sin}. * @since 3.3 */ - public Tan tan(AngularDimension unit) { + public Tan tan(AngularUnit unit) { return usesFieldRef() ? Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); } /** * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. * * @return new instance of {@link Sin}. * @since 3.3 */ public Tanh tanh() { - return tanh(AngularDimension.RADIANS); + return tanh(AngularUnit.RADIANS); } /** @@ -805,7 +808,7 @@ public Tanh tanh() { * @return new instance of {@link Sin}. 
* @since 3.3 */ - public Tanh tanh(AngularDimension unit) { + public Tanh tanh(AngularUnit unit) { return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); } @@ -2047,7 +2050,7 @@ protected String getMongoMethod() { * @author Christoph Strobl * @since 3.3 */ - public enum AngularDimension { + public enum AngularUnit { RADIANS, DEGREES } @@ -2065,76 +2068,82 @@ private Sin(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. *

        - * Use {@code sinhOf("angle", DEGREES)} as shortcut for

        { $sinh : { $degreesToRadians : "$angle" } }
        . + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *
        +		 * { $sinh : { $degreesToRadians : "$angle" } }
        +		 * 
        + * + * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sin sinOf(String fieldReference) { - return sinOf(fieldReference, AngularDimension.RADIANS); + return sinOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sin sinOf(String fieldReference, AngularDimension unit) { + public static Sin sinOf(String fieldReference, AngularUnit unit) { return sin(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sin sinOf(AggregationExpression expression) { - return sinOf(expression, AngularDimension.RADIANS); + return sinOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. 
*/ - public static Sin sinOf(AggregationExpression expression, AngularDimension unit) { + public static Sin sinOf(AggregationExpression expression, AngularUnit unit) { return sin(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @return new instance of {@link Sin}. */ public static Sin sin(Object value) { - return sin(value, AngularDimension.RADIANS); + return sin(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sin sin(Object value, AngularDimension unit) { + public static Sin sin(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Sin(value); @@ -2148,7 +2157,7 @@ protected String getMongoMethod() { /** * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @author Christoph Strobl * @since 3.3 @@ -2161,78 +2170,85 @@ private Sinh(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. 
* * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sinh sinhOf(String fieldReference) { - return sinhOf(fieldReference, AngularDimension.RADIANS); + return sinhOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. *

        - * Use {@code sinhOf("angle", DEGREES)} as shortcut for

        { $sinh : { $degreesToRadians : "$angle" } }
        . + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *
        +		 * { $sinh : { $degreesToRadians : "$angle" } }
        +		 * 
        + * + * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sinh sinhOf(String fieldReference, AngularDimension unit) { + public static Sinh sinhOf(String fieldReference, AngularUnit unit) { return sinh(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. *

        - * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Sin}. */ public static Sinh sinhOf(AggregationExpression expression) { - return sinhOf(expression, AngularDimension.RADIANS); + return sinhOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sinh sinhOf(AggregationExpression expression, AngularDimension unit) { + public static Sinh sinhOf(AggregationExpression expression, AngularUnit unit) { return sinh(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @return new instance of {@link Sin}. */ public static Sinh sinh(Object value) { - return sinh(value, AngularDimension.RADIANS); + return sinh(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) 
that resolves to a * numeric value * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Sin}. */ - public static Sinh sinh(Object value, AngularDimension unit) { + public static Sinh sinh(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Sinh(value); @@ -2258,82 +2274,82 @@ private Cos(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. *

        * Use {@code cosOf("angle", DEGREES)} as shortcut for - * + * *

         		 * { $cos : { $degreesToRadians : "$angle" } }
         		 * 
        - * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Cos}. */ public static Cos cosOf(String fieldReference) { - return cosOf(fieldReference, AngularDimension.RADIANS); + return cosOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cos}. */ - public static Cos cosOf(String fieldReference, AngularDimension unit) { + public static Cos cosOf(String fieldReference, AngularUnit unit) { return cos(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Cos}. */ public static Cos cosOf(AggregationExpression expression) { - return cosOf(expression, AngularDimension.RADIANS); + return cosOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cos}. 
*/ - public static Cos cosOf(AggregationExpression expression, AngularDimension unit) { + public static Cos cosOf(AggregationExpression expression, AngularUnit unit) { return cos(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @return new instance of {@link Cos}. */ public static Cos cos(Object value) { - return cos(value, AngularDimension.RADIANS); + return cos(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cos}. */ - public static Cos cos(Object value, AngularDimension unit) { + public static Cos cos(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Cos(value); @@ -2347,7 +2363,7 @@ protected String getMongoMethod() { /** * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @author Christoph Strobl * @since 3.3 @@ -2360,38 +2376,38 @@ private Cosh(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. 
* * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Cosh}. */ public static Cosh coshOf(String fieldReference) { - return coshOf(fieldReference, AngularDimension.RADIANS); + return coshOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. *

        * Use {@code coshOf("angle", DEGREES)} as shortcut for - * + * *

         		 * { $cosh : { $degreesToRadians : "$angle" } }
         		 * 
        - * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cosh}. */ - public static Cosh coshOf(String fieldReference, AngularDimension unit) { + public static Cosh coshOf(String fieldReference, AngularUnit unit) { return cosh(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. *

        * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. @@ -2400,45 +2416,45 @@ public static Cosh coshOf(String fieldReference, AngularDimension unit) { * @return new instance of {@link Cosh}. */ public static Cosh coshOf(AggregationExpression expression) { - return coshOf(expression, AngularDimension.RADIANS); + return coshOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cosh}. */ - public static Cosh coshOf(AggregationExpression expression, AngularDimension unit) { + public static Cosh coshOf(AggregationExpression expression, AngularUnit unit) { return cosh(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @return new instance of {@link Cosh}. */ public static Cosh cosh(Object value) { - return cosh(value, AngularDimension.RADIANS); + return cosh(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @param unit the unit of measure used by the value of the given field. 
* @return new instance of {@link Cosh}. */ - public static Cosh cosh(Object value, AngularDimension unit) { + public static Cosh cosh(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Cosh(value); @@ -2464,82 +2480,82 @@ private Tan(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in - * {@link AngularDimension#RADIANS radians}. + * {@link AngularUnit#RADIANS radians}. *

        * Use {@code tanOf("angle", DEGREES)} as shortcut for - * + * *

         		 * { $tan : { $degreesToRadians : "$angle" } }
         		 * 
        - * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Tan}. */ public static Tan tanOf(String fieldReference) { - return tanOf(fieldReference, AngularDimension.RADIANS); + return tanOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tan}. */ - public static Tan tanOf(String fieldReference, AngularDimension unit) { + public static Tan tanOf(String fieldReference, AngularUnit unit) { return tan(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link Tan}. */ public static Tan tanOf(AggregationExpression expression) { - return tanOf(expression, AngularDimension.RADIANS); + return tanOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tan}. 
*/ - public static Tan tanOf(AggregationExpression expression, AngularDimension unit) { + public static Tan tanOf(AggregationExpression expression, AngularUnit unit) { return tan(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @return new instance of {@link Tan}. */ public static Tan tan(Object value) { - return tan(value, AngularDimension.RADIANS); + return tan(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given - * {@link AngularDimension unit}. + * {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tan}. */ - public static Tan tan(Object value, AngularDimension unit) { + public static Tan tan(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Tan(value); @@ -2553,7 +2569,7 @@ protected String getMongoMethod() { /** * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @author Christoph Strobl * @since 3.3 @@ -2566,38 +2582,38 @@ private Tanh(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. 
* * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Tanh}. */ public static Tanh tanhOf(String fieldReference) { - return tanhOf(fieldReference, AngularDimension.RADIANS); + return tanhOf(fieldReference, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. *

        * Use {@code tanhOf("angle", DEGREES)} as shortcut for - * + * *

         		 * { $tanh : { $degreesToRadians : "$angle" } }
         		 * 
        - * + * * . * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tanh}. */ - public static Tanh tanhOf(String fieldReference, AngularDimension unit) { + public static Tanh tanhOf(String fieldReference, AngularUnit unit) { return tanh(Fields.field(fieldReference), unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. *

        * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. @@ -2606,45 +2622,45 @@ public static Tanh tanhOf(String fieldReference, AngularDimension unit) { * @return new instance of {@link Tanh}. */ public static Tanh tanhOf(AggregationExpression expression) { - return tanhOf(expression, AngularDimension.RADIANS); + return tanhOf(expression, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tanh}. */ - public static Tanh tanhOf(AggregationExpression expression, AngularDimension unit) { + public static Tanh tanhOf(AggregationExpression expression, AngularUnit unit) { return tanh(expression, unit); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * {@link AngularDimension#RADIANS}. + * {@link AngularUnit#RADIANS}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value. * @return new instance of {@link Tanh}. */ public static Tanh tanh(Object value) { - return tanh(value, AngularDimension.RADIANS); + return tanh(value, AngularUnit.RADIANS); } /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in - * the given {@link AngularDimension unit}. + * the given {@link AngularUnit unit}. * * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a * numeric value * @param unit the unit of measure used by the value of the given field. 
* @return new instance of {@link Tanh}. */ - public static Tanh tanh(Object value, AngularDimension unit) { + public static Tanh tanh(Object value, AngularUnit unit) { - if (ObjectUtils.nullSafeEquals(AngularDimension.DEGREES, unit)) { + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); } return new Tanh(value); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java index b34933444a..637ebd8d8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -232,7 +232,8 @@ public ToString convertToString() { } /** - * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians.\ + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to + * radians. * * @return new instance of {@link DegreesToRadians}. 
* @since 3.3 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 55d1647568..7cde7cd1c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -86,84 +86,84 @@ void rendersIntegralWithUnit() { void rendersSin() { assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sin : \"$angle\" }")); + .isEqualTo("{ $sin : \"$angle\" }"); } @Test // GH-3728 void rendersSinWithValueInDegrees() { - assertThat(valueOf("angle").sin(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sin : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").sin(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sin : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3728 void rendersSinh() { assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sinh : \"$angle\" }")); + .isEqualTo("{ $sinh : \"$angle\" }"); } @Test // GH-3728 void rendersSinhWithValueInDegrees() { - assertThat(valueOf("angle").sinh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $sinh : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3710 void rendersCos() { assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cos : \"$angle\" }")); + .isEqualTo("{ $cos : 
\"$angle\" }"); } @Test // GH-3710 void rendersCosWithValueInDegrees() { - assertThat(valueOf("angle").cos(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cos : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").cos(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cos : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3710 void rendersCosh() { assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cosh : \"$angle\" }")); + .isEqualTo("{ $cosh : \"$angle\" }"); } @Test // GH-3710 void rendersCoshWithValueInDegrees() { - assertThat(valueOf("angle").cosh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $cosh : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").cosh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $cosh : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3730 void rendersTan() { assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tan : \"$angle\" }")); + .isEqualTo("{ $tan : \"$angle\" }"); } @Test // GH-3730 void rendersTanWithValueInDegrees() { - assertThat(valueOf("angle").tan(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tan : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").tan(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tan : { $degreesToRadians : \"$angle\" } }"); } @Test // GH-3730 void rendersTanh() { assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tanh : \"$angle\" }")); + .isEqualTo("{ $tanh : \"$angle\" }"); } @Test // GH-3730 void rendersTanhWithValueInDegrees() { - 
assertThat(valueOf("angle").tanh(AngularDimension.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $tanh : { $degreesToRadians : \"$angle\" } }")); + assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index e30f7f9fb9..193ffb520d 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; import java.util.Arrays; @@ -53,21 +53,21 @@ void beforeEach() { @Test // DATAMONGO-774 void shouldRenderConstantExpression() { - assertThat(transform("1")).isEqualTo("1"); - assertThat(transform("-1")).isEqualTo("-1"); - assertThat(transform("1.0")).isEqualTo("1.0"); - assertThat(transform("-1.0")).isEqualTo("-1.0"); - assertThat(transform("null")).isNull(); + assertThat(transformValue("1")).isEqualTo("1"); + assertThat(transformValue("-1")).isEqualTo("-1"); + assertThat(transformValue("1.0")).isEqualTo("1.0"); + assertThat(transformValue("-1.0")).isEqualTo("-1.0"); + assertThat(transformValue("null")).isNull(); } @Test // DATAMONGO-774 void shouldSupportKnownOperands() { - assertThat(transform("a + b")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a - b")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); - 
assertThat(transform("a * b")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a / b")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" , \"$b\"]}")); - assertThat(transform("a % b")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("a + b")).isEqualTo("{ \"$add\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a - b")).isEqualTo("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a * b")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a / b")).isEqualTo("{ \"$divide\" : [ \"$a\" , \"$b\"]}"); + assertThat(transform("a % b")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-774 @@ -77,35 +77,35 @@ void shouldThrowExceptionOnUnknownOperand() { @Test // DATAMONGO-774 void shouldRenderSumExpression() { - assertThat(transform("a + 1")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 1]}")); + assertThat(transform("a + 1")).isEqualTo("{ \"$add\" : [ \"$a\" , 1]}"); } @Test // DATAMONGO-774 void shouldRenderFormula() { - assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo(Document.parse( - "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); + assertThat(transform("(netPrice + surCharge) * taxrate + 42")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } @Test // DATAMONGO-774 void shouldRenderFormulaInCurlyBrackets() { - assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo(Document.parse( - "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}")); + assertThat(transform("{(netPrice + surCharge) * taxrate + 42}")).isEqualTo( + "{ \"$add\" : [ { \"$multiply\" : [ { \"$add\" : [ \"$netPrice\" , \"$surCharge\"]} , \"$taxrate\"]} , 42]}"); } @Test // DATAMONGO-774 void shouldRenderFieldReference() { - 
assertThat(transform("foo")).isEqualTo("$foo"); - assertThat(transform("$foo")).isEqualTo("$foo"); + assertThat(transformValue("foo")).isEqualTo("$foo"); + assertThat(transformValue("$foo")).isEqualTo("$foo"); } @Test // DATAMONGO-774 void shouldRenderNestedFieldReference() { - assertThat(transform("foo.bar")).isEqualTo("$foo.bar"); - assertThat(transform("$foo.bar")).isEqualTo("$foo.bar"); + assertThat(transformValue("foo.bar")).isEqualTo("$foo.bar"); + assertThat(transformValue("$foo.bar")).isEqualTo("$foo.bar"); } @Test // DATAMONGO-774 @@ -113,52 +113,52 @@ void shouldRenderNestedFieldReference() { void shouldRenderNestedIndexedFieldReference() { // TODO add support for rendering nested indexed field references - assertThat(transform("foo[3].bar")).isEqualTo("$foo[3].bar"); + assertThat(transformValue("foo[3].bar")).isEqualTo("$foo[3].bar"); } @Test // DATAMONGO-774 void shouldRenderConsecutiveOperation() { - assertThat(transform("1 + 1 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , 1]}")); + assertThat(transform("1 + 1 + 1")).isEqualTo("{ \"$add\" : [ 1 , 1 , 1]}"); } @Test // DATAMONGO-774 void shouldRenderComplexExpression0() { assertThat(transform("-(1 + q)")) - .isEqualTo(Document.parse("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}")); + .isEqualTo("{ \"$multiply\" : [ -1 , { \"$add\" : [ 1 , \"$q\"]}]}"); } @Test // DATAMONGO-774 void shouldRenderComplexExpression1() { - assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo(Document.parse( - "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}")); + assertThat(transform("1 + (q + 1) / (q - 1)")).isEqualTo( + "{ \"$add\" : [ 1 , { \"$divide\" : [ { \"$add\" : [ \"$q\" , 1]} , { \"$subtract\" : [ \"$q\" , 1]}]}]}"); } @Test // DATAMONGO-774 void shouldRenderComplexExpression2() { - assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo(Document.parse( - "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 
4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}")); + assertThat(transform("(q + 1 + 4 - 5) / (q + 1 + 3 + 4)")).isEqualTo( + "{ \"$divide\" : [ { \"$subtract\" : [ { \"$add\" : [ \"$q\" , 1 , 4]} , 5]} , { \"$add\" : [ \"$q\" , 1 , 3 , 4]}]}"); } @Test // DATAMONGO-774 void shouldRenderBinaryExpressionWithMixedSignsCorrectly() { - assertThat(transform("-4 + 1")).isEqualTo(Document.parse("{ \"$add\" : [ -4 , 1]}")); - assertThat(transform("1 + -4")).isEqualTo(Document.parse("{ \"$add\" : [ 1 , -4]}")); + assertThat(transform("-4 + 1")).isEqualTo("{ \"$add\" : [ -4 , 1]}"); + assertThat(transform("1 + -4")).isEqualTo("{ \"$add\" : [ 1 , -4]}"); } @Test // DATAMONGO-774 void shouldRenderConsecutiveOperationsInComplexExpression() { assertThat(transform("1 + 1 + (1 + 1 + 1) / q")) - .isEqualTo(Document.parse("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}")); + .isEqualTo("{ \"$add\" : [ 1 , 1 , { \"$divide\" : [ { \"$add\" : [ 1 , 1 , 1]} , \"$q\"]}]}"); } @Test // DATAMONGO-774 void shouldRenderParameterExpressionResults() { - assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo(Document.parse("{ \"$add\" : [ 1 , 2 , 3]}")); + assertThat(transform("[0] + [1] + [2]", 1, 2, 3)).isEqualTo("{ \"$add\" : [ 1 , 2 , 3]}"); } @Test // DATAMONGO-774 @@ -189,852 +189,856 @@ void shouldRenderCompoundExpressionsWithIndexerAndFieldReference() { Person person = new Person(); person.setAge(10); - assertThat(transform("[0].age + a.c", person)).isEqualTo(Document.parse("{ \"$add\" : [ 10 , \"$a.c\"] }")); + assertThat(transform("[0].age + a.c", person)).isEqualTo("{ \"$add\" : [ 10 , \"$a.c\"] }"); } @Test // DATAMONGO-840 void shouldRenderCompoundExpressionsWithOnlyFieldReferences() { - assertThat(transform("a.b + a.c")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}")); + assertThat(transform("a.b + a.c")).isEqualTo("{ \"$add\" : [ \"$a.b\" , \"$a.c\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeAnd() 
{ - assertThat(transform("and(a, b)")).isEqualTo(Document.parse("{ \"$and\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("and(a, b)")).isEqualTo("{ \"$and\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeOr() { - assertThat(transform("or(a, b)")).isEqualTo(Document.parse("{ \"$or\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("or(a, b)")).isEqualTo("{ \"$or\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeNot() { - assertThat(transform("not(a)")).isEqualTo(Document.parse("{ \"$not\" : [ \"$a\"]}")); + assertThat(transform("not(a)")).isEqualTo("{ \"$not\" : [ \"$a\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEquals() { - assertThat(transform("setEquals(a, b)")).isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("setEquals(a, b)")).isEqualTo("{ \"$setEquals\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEqualsForArrays() { assertThat(transform("setEquals(new int[]{1,2,3}, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setEquals\" : [ [ 1 , 2 , 3] , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSetEqualsMixedArrays() { assertThat(transform("setEquals(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setEquals\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSetIntersection() { assertThat(transform("setIntersection(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setIntersection\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSetUnion() { assertThat(transform("setUnion(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ 
\"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setUnion\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSeDifference() { assertThat(transform("setDifference(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setDifference\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSetIsSubset() { assertThat(transform("setIsSubset(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$setIsSubset\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAnyElementTrue() { - assertThat(transform("anyElementTrue(a)")).isEqualTo(Document.parse("{ \"$anyElementTrue\" : [ \"$a\"]}")); + assertThat(transform("anyElementTrue(a)")).isEqualTo("{ \"$anyElementTrue\" : [ \"$a\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAllElementsTrue() { assertThat(transform("allElementsTrue(a, new int[]{4,5,6})")) - .isEqualTo(Document.parse("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}")); + .isEqualTo("{ \"$allElementsTrue\" : [ \"$a\" , [ 4 , 5 , 6]]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCmp() { - assertThat(transform("cmp(a, 250)")).isEqualTo(Document.parse("{ \"$cmp\" : [ \"$a\" , 250]}")); + assertThat(transform("cmp(a, 250)")).isEqualTo("{ \"$cmp\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceEq() { - assertThat(transform("eq(a, 250)")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$a\" , 250]}")); + assertThat(transform("eq(a, 250)")).isEqualTo("{ \"$eq\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceGt() { - assertThat(transform("gt(a, 250)")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$a\" , 250]}")); + assertThat(transform("gt(a, 250)")).isEqualTo("{ \"$gt\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 
void shouldRenderMethodReferenceGte() { - assertThat(transform("gte(a, 250)")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$a\" , 250]}")); + assertThat(transform("gte(a, 250)")).isEqualTo("{ \"$gte\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLt() { - assertThat(transform("lt(a, 250)")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$a\" , 250]}")); + assertThat(transform("lt(a, 250)")).isEqualTo("{ \"$lt\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLte() { - assertThat(transform("lte(a, 250)")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$a\" , 250]}")); + assertThat(transform("lte(a, 250)")).isEqualTo("{ \"$lte\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNe() { - assertThat(transform("ne(a, 250)")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$a\" , 250]}")); + assertThat(transform("ne(a, 250)")).isEqualTo("{ \"$ne\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAbs() { - assertThat(transform("abs(1)")).isEqualTo(Document.parse("{ \"$abs\" : 1}")); + assertThat(transform("abs(1)")).isEqualTo("{ \"$abs\" : 1}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAdd() { - assertThat(transform("add(a, 250)")).isEqualTo(Document.parse("{ \"$add\" : [ \"$a\" , 250]}")); + assertThat(transform("add(a, 250)")).isEqualTo("{ \"$add\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCeil() { - assertThat(transform("ceil(7.8)")).isEqualTo(Document.parse("{ \"$ceil\" : 7.8}")); + assertThat(transform("ceil(7.8)")).isEqualTo("{ \"$ceil\" : 7.8}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDivide() { - assertThat(transform("divide(a, 250)")).isEqualTo(Document.parse("{ \"$divide\" : [ \"$a\" , 250]}")); + assertThat(transform("divide(a, 250)")).isEqualTo("{ \"$divide\" : [ \"$a\" , 250]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceExp() { - 
assertThat(transform("exp(2)")).isEqualTo(Document.parse("{ \"$exp\" : 2}")); + assertThat(transform("exp(2)")).isEqualTo("{ \"$exp\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceFloor() { - assertThat(transform("floor(2)")).isEqualTo(Document.parse("{ \"$floor\" : 2}")); + assertThat(transform("floor(2)")).isEqualTo("{ \"$floor\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLn() { - assertThat(transform("ln(2)")).isEqualTo(Document.parse("{ \"$ln\" : 2}")); + assertThat(transform("ln(2)")).isEqualTo("{ \"$ln\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLog() { - assertThat(transform("log(100, 10)")).isEqualTo(Document.parse("{ \"$log\" : [ 100 , 10]}")); + assertThat(transform("log(100, 10)")).isEqualTo("{ \"$log\" : [ 100 , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLog10() { - assertThat(transform("log10(100)")).isEqualTo(Document.parse("{ \"$log10\" : 100}")); + assertThat(transform("log10(100)")).isEqualTo("{ \"$log10\" : 100}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMod() { - assertThat(transform("mod(a, b)")).isEqualTo(Document.parse("{ \"$mod\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("mod(a, b)")).isEqualTo("{ \"$mod\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMultiply() { - assertThat(transform("multiply(a, b)")).isEqualTo(Document.parse("{ \"$multiply\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("multiply(a, b)")).isEqualTo("{ \"$multiply\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodePow() { - assertThat(transform("pow(a, 2)")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$a\" , 2]}")); + assertThat(transform("pow(a, 2)")).isEqualTo("{ \"$pow\" : [ \"$a\" , 2]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSqrt() { - assertThat(transform("sqrt(2)")).isEqualTo(Document.parse("{ \"$sqrt\" : 2}")); + 
assertThat(transform("sqrt(2)")).isEqualTo("{ \"$sqrt\" : 2}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSubtract() { - assertThat(transform("subtract(a, b)")).isEqualTo(Document.parse("{ \"$subtract\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("subtract(a, b)")).isEqualTo("{ \"$subtract\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceTrunc() { - assertThat(transform("trunc(2.1)")).isEqualTo(Document.parse("{ \"$trunc\" : 2.1}")); + assertThat(transform("trunc(2.1)")).isEqualTo("{ \"$trunc\" : 2.1}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeConcat() { - assertThat(transform("concat(a, b, 'c')")).isEqualTo(Document.parse("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}")); + assertThat(transform("concat(a, b, 'c')")).isEqualTo("{ \"$concat\" : [ \"$a\" , \"$b\" , \"c\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSubstrc() { - assertThat(transform("substr(a, 0, 1)")).isEqualTo(Document.parse("{ \"$substr\" : [ \"$a\" , 0 , 1]}")); + assertThat(transform("substr(a, 0, 1)")).isEqualTo("{ \"$substr\" : [ \"$a\" , 0 , 1]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceToLower() { - assertThat(transform("toLower(a)")).isEqualTo(Document.parse("{ \"$toLower\" : \"$a\"}")); + assertThat(transform("toLower(a)")).isEqualTo("{ \"$toLower\" : \"$a\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceToUpper() { - assertThat(transform("toUpper(a)")).isEqualTo(Document.parse("{ \"$toUpper\" : \"$a\"}")); + assertThat(transform("toUpper(a)")).isEqualTo("{ \"$toUpper\" : \"$a\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStrCaseCmp() { - assertThat(transform("strcasecmp(a, b)")).isEqualTo(Document.parse("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("strcasecmp(a, b)")).isEqualTo("{ \"$strcasecmp\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMeta() { - 
assertThat(transform("meta('textScore')")).isEqualTo(Document.parse("{ \"$meta\" : \"textScore\"}")); + assertThat(transform("meta('textScore')")).isEqualTo("{ \"$meta\" : \"textScore\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeArrayElemAt() { - assertThat(transform("arrayElemAt(a, 10)")).isEqualTo(Document.parse("{ \"$arrayElemAt\" : [ \"$a\" , 10]}")); + assertThat(transform("arrayElemAt(a, 10)")).isEqualTo("{ \"$arrayElemAt\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeConcatArrays() { assertThat(transform("concatArrays(a, b, c)")) - .isEqualTo(Document.parse("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}")); + .isEqualTo("{ \"$concatArrays\" : [ \"$a\" , \"$b\" , \"$c\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeFilter() { - assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo(Document.parse( - "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}")); + assertThat(transform("filter(a, 'num', '$$num' > 10)")).isEqualTo( + "{ \"$filter\" : { \"input\" : \"$a\" , \"as\" : \"num\" , \"cond\" : { \"$gt\" : [ \"$$num\" , 10]}}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceIsArray() { - assertThat(transform("isArray(a)")).isEqualTo(Document.parse("{ \"$isArray\" : \"$a\"}")); + assertThat(transform("isArray(a)")).isEqualTo("{ \"$isArray\" : \"$a\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceIsSize() { - assertThat(transform("size(a)")).isEqualTo(Document.parse("{ \"$size\" : \"$a\"}")); + assertThat(transform("size(a)")).isEqualTo("{ \"$size\" : \"$a\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSlice() { - assertThat(transform("slice(a, 10)")).isEqualTo(Document.parse("{ \"$slice\" : [ \"$a\" , 10]}")); + assertThat(transform("slice(a, 10)")).isEqualTo("{ \"$slice\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMap() { - 
assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo(Document.parse( - "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}")); + assertThat(transform("map(quizzes, 'grade', '$$grade' + 2)")).isEqualTo( + "{ \"$map\" : { \"input\" : \"$quizzes\" , \"as\" : \"grade\" , \"in\" : { \"$add\" : [ \"$$grade\" , 2]}}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeLet() { - assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo(Document.parse( - "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}")); + assertThat(transform("let({low:1, high:'$$low'}, gt('$$low', '$$high'))")).isEqualTo( + "{ \"$let\" : { \"vars\" : { \"low\" : 1 , \"high\" : \"$$low\"} , \"in\" : { \"$gt\" : [ \"$$low\" , \"$$high\"]}}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLiteral() { - assertThat(transform("literal($1)")).isEqualTo(Document.parse("{ \"$literal\" : \"$1\"}")); + assertThat(transform("literal($1)")).isEqualTo("{ \"$literal\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDayOfYear() { - assertThat(transform("dayOfYear($1)")).isEqualTo(Document.parse("{ \"$dayOfYear\" : \"$1\"}")); + assertThat(transform("dayOfYear($1)")).isEqualTo("{ \"$dayOfYear\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDayOfMonth() { - assertThat(transform("dayOfMonth($1)")).isEqualTo(Document.parse("{ \"$dayOfMonth\" : \"$1\"}")); + assertThat(transform("dayOfMonth($1)")).isEqualTo("{ \"$dayOfMonth\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDayOfWeek() { - assertThat(transform("dayOfWeek($1)")).isEqualTo(Document.parse("{ \"$dayOfWeek\" : \"$1\"}")); + assertThat(transform("dayOfWeek($1)")).isEqualTo("{ \"$dayOfWeek\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceYear() { - 
assertThat(transform("year($1)")).isEqualTo(Document.parse("{ \"$year\" : \"$1\"}")); + assertThat(transform("year($1)")).isEqualTo("{ \"$year\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMonth() { - assertThat(transform("month($1)")).isEqualTo(Document.parse("{ \"$month\" : \"$1\"}")); + assertThat(transform("month($1)")).isEqualTo("{ \"$month\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceWeek() { - assertThat(transform("week($1)")).isEqualTo(Document.parse("{ \"$week\" : \"$1\"}")); + assertThat(transform("week($1)")).isEqualTo("{ \"$week\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceHour() { - assertThat(transform("hour($1)")).isEqualTo(Document.parse("{ \"$hour\" : \"$1\"}")); + assertThat(transform("hour($1)")).isEqualTo("{ \"$hour\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMinute() { - assertThat(transform("minute($1)")).isEqualTo(Document.parse("{ \"$minute\" : \"$1\"}")); + assertThat(transform("minute($1)")).isEqualTo("{ \"$minute\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceSecond() { - assertThat(transform("second($1)")).isEqualTo(Document.parse("{ \"$second\" : \"$1\"}")); + assertThat(transform("second($1)")).isEqualTo("{ \"$second\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceMillisecond() { - assertThat(transform("millisecond($1)")).isEqualTo(Document.parse("{ \"$millisecond\" : \"$1\"}")); + assertThat(transform("millisecond($1)")).isEqualTo("{ \"$millisecond\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceDateToString() { assertThat(transform("dateToString('%Y-%m-%d', $date)")) - .isEqualTo(Document.parse("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}")); + .isEqualTo("{ \"$dateToString\" : { \"format\" : \"%Y-%m-%d\" , \"date\" : \"$date\"}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceCond() { 
assertThat(transform("cond(qty > 250, 30, 20)")).isEqualTo( - Document.parse("{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}")); + "{ \"$cond\" : { \"if\" : { \"$gt\" : [ \"$qty\" , 250]} , \"then\" : 30 , \"else\" : 20}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeIfNull() { - assertThat(transform("ifNull(a, 10)")).isEqualTo(Document.parse("{ \"$ifNull\" : [ \"$a\" , 10]}")); + assertThat(transform("ifNull(a, 10)")).isEqualTo("{ \"$ifNull\" : [ \"$a\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeSum() { - assertThat(transform("sum(a, b)")).isEqualTo(Document.parse("{ \"$sum\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("sum(a, b)")).isEqualTo("{ \"$sum\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeAvg() { - assertThat(transform("avg(a, b)")).isEqualTo(Document.parse("{ \"$avg\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("avg(a, b)")).isEqualTo("{ \"$avg\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceFirst() { - assertThat(transform("first($1)")).isEqualTo(Document.parse("{ \"$first\" : \"$1\"}")); + assertThat(transform("first($1)")).isEqualTo("{ \"$first\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceLast() { - assertThat(transform("last($1)")).isEqualTo(Document.parse("{ \"$last\" : \"$1\"}")); + assertThat(transform("last($1)")).isEqualTo("{ \"$last\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMax() { - assertThat(transform("max(a, b)")).isEqualTo(Document.parse("{ \"$max\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("max(a, b)")).isEqualTo("{ \"$max\" : [ \"$a\" , \"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeMin() { - assertThat(transform("min(a, b)")).isEqualTo(Document.parse("{ \"$min\" : [ \"$a\" , \"$b\"]}")); + assertThat(transform("min(a, b)")).isEqualTo("{ \"$min\" : [ \"$a\" , 
\"$b\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodePush() { assertThat(transform("push({'item':'$item', 'quantity':'$qty'})")) - .isEqualTo(Document.parse("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}")); + .isEqualTo("{ \"$push\" : { \"item\" : \"$item\" , \"quantity\" : \"$qty\"}}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceAddToSet() { - assertThat(transform("addToSet($1)")).isEqualTo(Document.parse("{ \"$addToSet\" : \"$1\"}")); + assertThat(transform("addToSet($1)")).isEqualTo("{ \"$addToSet\" : \"$1\"}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStdDevPop() { assertThat(transform("stdDevPop(scores.score)")) - .isEqualTo(Document.parse("{ \"$stdDevPop\" : [ \"$scores.score\"]}")); + .isEqualTo("{ \"$stdDevPop\" : [ \"$scores.score\"]}"); } @Test // DATAMONGO-1530 void shouldRenderMethodReferenceNodeStdDevSamp() { - assertThat(transform("stdDevSamp(age)")).isEqualTo(Document.parse("{ \"$stdDevSamp\" : [ \"$age\"]}")); + assertThat(transform("stdDevSamp(age)")).isEqualTo("{ \"$stdDevSamp\" : [ \"$age\"]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeEq() { - assertThat(transform("foo == 10")).isEqualTo(Document.parse("{ \"$eq\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo == 10")).isEqualTo("{ \"$eq\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeNe() { - assertThat(transform("foo != 10")).isEqualTo(Document.parse("{ \"$ne\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo != 10")).isEqualTo("{ \"$ne\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeGt() { - assertThat(transform("foo > 10")).isEqualTo(Document.parse("{ \"$gt\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo > 10")).isEqualTo("{ \"$gt\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeGte() { - assertThat(transform("foo >= 10")).isEqualTo(Document.parse("{ \"$gte\" : [ \"$foo\" , 10]}")); + 
assertThat(transform("foo >= 10")).isEqualTo("{ \"$gte\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeLt() { - assertThat(transform("foo < 10")).isEqualTo(Document.parse("{ \"$lt\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo < 10")).isEqualTo("{ \"$lt\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeLte() { - assertThat(transform("foo <= 10")).isEqualTo(Document.parse("{ \"$lte\" : [ \"$foo\" , 10]}")); + assertThat(transform("foo <= 10")).isEqualTo("{ \"$lte\" : [ \"$foo\" , 10]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodePow() { - assertThat(transform("foo^2")).isEqualTo(Document.parse("{ \"$pow\" : [ \"$foo\" , 2]}")); + assertThat(transform("foo^2")).isEqualTo("{ \"$pow\" : [ \"$foo\" , 2]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeOr() { - assertThat(transform("true || false")).isEqualTo(Document.parse("{ \"$or\" : [ true , false]}")); + assertThat(transform("true || false")).isEqualTo("{ \"$or\" : [ true , false]}"); } @Test // DATAMONGO-1530 void shouldRenderComplexOperationNodeOr() { assertThat(transform("1+2 || concat(a, b) || true")).isEqualTo( - Document.parse("{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + "{ \"$or\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); } @Test // DATAMONGO-1530 void shouldRenderOperationNodeAnd() { - assertThat(transform("true && false")).isEqualTo(Document.parse("{ \"$and\" : [ true , false]}")); + assertThat(transform("true && false")).isEqualTo("{ \"$and\" : [ true , false]}"); } @Test // DATAMONGO-1530 void shouldRenderComplexOperationNodeAnd() { assertThat(transform("1+2 && concat(a, b) && true")).isEqualTo( - Document.parse("{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}")); + "{ \"$and\" : [ { \"$add\" : [ 1 , 2]} , { \"$concat\" : [ \"$a\" , \"$b\"]} , true]}"); } @Test // DATAMONGO-1530 void 
shouldRenderNotCorrectly() { - assertThat(transform("!true")).isEqualTo(Document.parse("{ \"$not\" : [ true]}")); + assertThat(transform("!true")).isEqualTo("{ \"$not\" : [ true]}"); } @Test // DATAMONGO-1530 void shouldRenderComplexNotCorrectly() { - assertThat(transform("!(foo > 10)")).isEqualTo(Document.parse("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}")); + assertThat(transform("!(foo > 10)")).isEqualTo("{ \"$not\" : [ { \"$gt\" : [ \"$foo\" , 10]}]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceIndexOfBytes() { assertThat(transform("indexOfBytes(item, 'foo')")) - .isEqualTo(Document.parse("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}")); + .isEqualTo("{ \"$indexOfBytes\" : [ \"$item\" , \"foo\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceIndexOfCP() { assertThat(transform("indexOfCP(item, 'foo')")) - .isEqualTo(Document.parse("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}")); + .isEqualTo("{ \"$indexOfCP\" : [ \"$item\" , \"foo\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceSplit() { - assertThat(transform("split(item, ',')")).isEqualTo(Document.parse("{ \"$split\" : [ \"$item\" , \",\"]}")); + assertThat(transform("split(item, ',')")).isEqualTo("{ \"$split\" : [ \"$item\" , \",\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceStrLenBytes() { - assertThat(transform("strLenBytes(item)")).isEqualTo(Document.parse("{ \"$strLenBytes\" : \"$item\"}")); + assertThat(transform("strLenBytes(item)")).isEqualTo("{ \"$strLenBytes\" : \"$item\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceStrLenCP() { - assertThat(transform("strLenCP(item)")).isEqualTo(Document.parse("{ \"$strLenCP\" : \"$item\"}")); + assertThat(transform("strLenCP(item)")).isEqualTo("{ \"$strLenCP\" : \"$item\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodSubstrCP() { - assertThat(transform("substrCP(item, 0, 5)")).isEqualTo(Document.parse("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}")); + 
assertThat(transform("substrCP(item, 0, 5)")).isEqualTo("{ \"$substrCP\" : [ \"$item\" , 0 , 5]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceReverseArray() { - assertThat(transform("reverseArray(array)")).isEqualTo(Document.parse("{ \"$reverseArray\" : \"$array\"}")); + assertThat(transform("reverseArray(array)")).isEqualTo("{ \"$reverseArray\" : \"$array\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceReduce() { - assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo(Document.parse( - "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}")); + assertThat(transform("reduce(field, '', {'$concat':{'$$value','$$this'}})")).isEqualTo( + "{ \"$reduce\" : { \"input\" : \"$field\" , \"initialValue\" : \"\" , \"in\" : { \"$concat\" : [ \"$$value\" , \"$$this\"]}}}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceZip() { assertThat(transform("zip(new String[]{'$array1', '$array2'})")) - .isEqualTo(Document.parse("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}")); + .isEqualTo("{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"]}}"); } @Test // DATAMONGO-1548 void shouldRenderMethodReferenceZipWithOptionalArgs() { - assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo(Document.parse( - "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}")); + assertThat(transform("zip(new String[]{'$array1', '$array2'}, true, new int[]{1,2})")).isEqualTo( + "{ \"$zip\" : { \"inputs\" : [ \"$array1\" , \"$array2\"] , \"useLongestLength\" : true , \"defaults\" : [ 1 , 2]}}"); } @Test // DATAMONGO-1548 void shouldRenderMethodIn() { - assertThat(transform("in('item', array)")).isEqualTo(Document.parse("{ \"$in\" : [ \"item\" , \"$array\"]}")); + assertThat(transform("in('item', array)")).isEqualTo("{ \"$in\" : [ \"item\" , 
\"$array\"]}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneIsoDayOfWeek() { - assertThat(transform("isoDayOfWeek(date)")).isEqualTo(Document.parse("{ \"$isoDayOfWeek\" : \"$date\"}")); + assertThat(transform("isoDayOfWeek(date)")).isEqualTo("{ \"$isoDayOfWeek\" : \"$date\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneIsoWeek() { - assertThat(transform("isoWeek(date)")).isEqualTo(Document.parse("{ \"$isoWeek\" : \"$date\"}")); + assertThat(transform("isoWeek(date)")).isEqualTo("{ \"$isoWeek\" : \"$date\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneIsoWeekYear() { - assertThat(transform("isoWeekYear(date)")).isEqualTo(Document.parse("{ \"$isoWeekYear\" : \"$date\"}")); + assertThat(transform("isoWeekYear(date)")).isEqualTo("{ \"$isoWeekYear\" : \"$date\"}"); } @Test // DATAMONGO-1548 void shouldRenderMethodRefereneType() { - assertThat(transform("type(a)")).isEqualTo(Document.parse("{ \"$type\" : \"$a\"}")); + assertThat(transform("type(a)")).isEqualTo("{ \"$type\" : \"$a\"}"); } @Test // DATAMONGO-2077 void shouldRenderArrayToObjectWithFieldReference() { - assertThat(transform("arrayToObject(field)")).isEqualTo(Document.parse("{ \"$arrayToObject\" : \"$field\"}")); + assertThat(transform("arrayToObject(field)")).isEqualTo("{ \"$arrayToObject\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderArrayToObjectWithArray() { assertThat(transform("arrayToObject(new String[]{'key', 'value'})")) - .isEqualTo(Document.parse("{ \"$arrayToObject\" : [\"key\", \"value\"]}")); + .isEqualTo("{ \"$arrayToObject\" : [\"key\", \"value\"]}"); } @Test // DATAMONGO-2077 void shouldRenderObjectToArrayWithFieldReference() { - assertThat(transform("objectToArray(field)")).isEqualTo(Document.parse("{ \"$objectToArray\" : \"$field\"}")); + assertThat(transform("objectToArray(field)")).isEqualTo("{ \"$objectToArray\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderMergeObjects() { assertThat(transform("mergeObjects(field1, 
$$ROOT)")) - .isEqualTo(Document.parse("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}")); + .isEqualTo("{ \"$mergeObjects\" : [\"$field1\", \"$$ROOT\"]}"); } @Test // DATAMONGO-2077 void shouldRenderTrimWithoutChars() { - assertThat(transform("trim(field)")).isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\"}}")); + assertThat(transform("trim(field)")).isEqualTo("{ \"$trim\" : {\"input\" : \"$field\"}}"); } @Test // DATAMONGO-2077 void shouldRenderTrimWithChars() { assertThat(transform("trim(field, 'ie')")) - .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); } @Test // DATAMONGO-2077 void shouldRenderTrimWithCharsFromFieldReference() { assertThat(transform("trim(field1, field2)")) - .isEqualTo(Document.parse("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); + .isEqualTo("{ \"$trim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } @Test // DATAMONGO-2077 void shouldRenderLtrimWithoutChars() { - assertThat(transform("ltrim(field)")).isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\"}}")); + assertThat(transform("ltrim(field)")).isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\"}}"); } @Test // DATAMONGO-2077 void shouldRenderLtrimWithChars() { assertThat(transform("ltrim(field, 'ie')")) - .isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); } @Test // DATAMONGO-2077 void shouldRenderLtrimWithCharsFromFieldReference() { assertThat(transform("ltrim(field1, field2)")) - .isEqualTo(Document.parse("{ \"$ltrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); + .isEqualTo("{ \"$ltrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } @Test // DATAMONGO-2077 void shouldRenderRtrimWithoutChars() { - 
assertThat(transform("rtrim(field)")).isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\"}}")); + assertThat(transform("rtrim(field)")).isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\"}}"); } @Test // DATAMONGO-2077 void shouldRenderRtrimWithChars() { assertThat(transform("rtrim(field, 'ie')")) - .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}")); + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field\", \"chars\" : \"ie\" }}"); } @Test // DATAMONGO-2077 void shouldRenderRtrimWithCharsFromFieldReference() { assertThat(transform("rtrim(field1, field2)")) - .isEqualTo(Document.parse("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}")); + .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } @Test // DATAMONGO-2077 void shouldRenderConvertWithoutOptionalParameters() { assertThat(transform("convert(field, 'string')")) - .isEqualTo(Document.parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}")); + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"string\" }}"); } @Test // DATAMONGO-2077 void shouldRenderConvertWithOnError() { - assertThat(transform("convert(field, 'int', 'Not an integer.')")).isEqualTo(Document - .parse("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}")); + assertThat(transform("convert(field, 'int', 'Not an integer.')")) + .isEqualTo("{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\" }}"); } @Test // DATAMONGO-2077 void shouldRenderConvertWithOnErrorOnNull() { - assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo(Document.parse( - "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", \"onError\" : \"Not an integer.\", \"onNull\" : -1 }}")); + assertThat(transform("convert(field, 'int', 'Not an integer.', -1)")).isEqualTo( + "{ \"$convert\" : {\"input\" : \"$field\", \"to\" : \"int\", 
\"onError\" : \"Not an integer.\", \"onNull\" : -1 }}"); } @Test // DATAMONGO-2077 void shouldRenderToBool() { - assertThat(transform("toBool(field)")).isEqualTo(Document.parse("{ \"$toBool\" : \"$field\"}")); + assertThat(transform("toBool(field)")).isEqualTo("{ \"$toBool\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToDate() { - assertThat(transform("toDate(field)")).isEqualTo(Document.parse("{ \"$toDate\" : \"$field\"}")); + assertThat(transform("toDate(field)")).isEqualTo("{ \"$toDate\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToDecimal() { - assertThat(transform("toDecimal(field)")).isEqualTo(Document.parse("{ \"$toDecimal\" : \"$field\"}")); + assertThat(transform("toDecimal(field)")).isEqualTo("{ \"$toDecimal\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToDouble() { - assertThat(transform("toDouble(field)")).isEqualTo(Document.parse("{ \"$toDouble\" : \"$field\"}")); + assertThat(transform("toDouble(field)")).isEqualTo("{ \"$toDouble\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToInt() { - assertThat(transform("toInt(field)")).isEqualTo(Document.parse("{ \"$toInt\" : \"$field\"}")); + assertThat(transform("toInt(field)")).isEqualTo("{ \"$toInt\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToLong() { - assertThat(transform("toLong(field)")).isEqualTo(Document.parse("{ \"$toLong\" : \"$field\"}")); + assertThat(transform("toLong(field)")).isEqualTo("{ \"$toLong\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToObjectId() { - assertThat(transform("toObjectId(field)")).isEqualTo(Document.parse("{ \"$toObjectId\" : \"$field\"}")); + assertThat(transform("toObjectId(field)")).isEqualTo("{ \"$toObjectId\" : \"$field\"}"); } @Test // DATAMONGO-2077 void shouldRenderToString() { - assertThat(transform("toString(field)")).isEqualTo(Document.parse("{ \"$toString\" : \"$field\"}")); + assertThat(transform("toString(field)")).isEqualTo("{ \"$toString\" : \"$field\"}"); } 
@Test // DATAMONGO-2077 void shouldRenderDateFromStringWithoutOptionalParameters() { assertThat(transform("dateFromString(field)")) - .isEqualTo(Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}")); + .isEqualTo("{ \"$dateFromString\" : {\"dateString\" : \"$field\" }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormat() { assertThat(transform("dateFromString(field, 'DD-MM-YYYY')")).isEqualTo( - Document.parse("{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}")); + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\" }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormatAndTimezone() { - assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo(Document.parse( - "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}")); + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC')")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\" }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormatTimezoneAndOnError() { - assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo(Document.parse( - "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}")); + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1)")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1 }}"); } @Test // DATAMONGO-2077 void shouldRenderDateFromStringWithFormatTimezoneOnErrorAndOnNull() { - assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo(Document.parse( - "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, 
\"onNull\" : -2}}")); + assertThat(transform("dateFromString(field, 'DD-MM-YYYY', 'UTC', -1, -2)")).isEqualTo( + "{ \"$dateFromString\" : {\"dateString\" : \"$field\", \"format\" : \"DD-MM-YYYY\", \"timezone\" : \"UTC\", \"onError\" : -1, \"onNull\" : -2}}"); } @Test // DATAMONGO-2077, DATAMONGO-2671 void shouldRenderDateFromParts() { - assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( - "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}")); + assertThat(transform("dateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"year\" : \"$y\", \"month\" : \"$m\", \"day\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); } @Test // DATAMONGO-2077, DATAMONGO-2671 void shouldRenderIsoDateFromParts() { - assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo(Document.parse( - "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}")); + assertThat(transform("isoDateFromParts(y, m, d, h, mm, s, ms, 'UTC')")).isEqualTo( + "{ \"$dateFromParts\" : {\"isoWeekYear\" : \"$y\", \"isoWeek\" : \"$m\", \"isoDayOfWeek\" : \"$d\", \"hour\" : \"$h\", \"minute\" : \"$mm\", \"second\" : \"$s\", \"millisecond\" : \"$ms\", \"timezone\" : \"UTC\"}}"); } @Test // DATAMONGO-2077 void shouldRenderDateToParts() { assertThat(transform("dateToParts(field, 'UTC', false)")).isEqualTo( - Document.parse("{ \"$dateToParts\" : {\"date\" : \"$field\", \"timezone\" : \"UTC\", \"iso8601\" : false}}")); + "{ \"$dateToParts\" : {\"date\" : \"$field\", \"timezone\" : \"UTC\", \"iso8601\" : false}}"); } @Test // DATAMONGO-2077 void 
shouldRenderIndexOfArray() { assertThat(transform("indexOfArray(field, 2)")) - .isEqualTo(Document.parse("{ \"$indexOfArray\" : [\"$field\", 2 ]}")); + .isEqualTo("{ \"$indexOfArray\" : [\"$field\", 2 ]}"); } @Test // DATAMONGO-2077 void shouldRenderRange() { - assertThat(transform("range(0, 10, 2)")).isEqualTo(Document.parse("{ \"$range\" : [0, 10, 2 ]}")); + assertThat(transform("range(0, 10, 2)")).isEqualTo("{ \"$range\" : [0, 10, 2 ]}"); } @Test // DATAMONGO-2370 void shouldRenderRound() { - assertThat(transform("round(field)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\"]}")); + assertThat(transform("round(field)")).isEqualTo("{ \"$round\" : [\"$field\"]}"); } @Test // DATAMONGO-2370 void shouldRenderRoundWithPlace() { - assertThat(transform("round(field, 2)")).isEqualTo(Document.parse("{ \"$round\" : [\"$field\", 2]}")); + assertThat(transform("round(field, 2)")).isEqualTo("{ \"$round\" : [\"$field\", 2]}"); } @Test // GH-3714 void shouldRenderDegreesToRadians() { - assertThat(transform("degreesToRadians(angle_a)")).isEqualTo(Document.parse("{ \"$degreesToRadians\" : \"$angle_a\"}")); + assertThat(transform("degreesToRadians(angle_a)")).isEqualTo("{ \"$degreesToRadians\" : \"$angle_a\"}"); } @Test // GH-3712 void shouldRenderCovariancePop() { assertThat(transform("covariancePop(field1, field2)")) - .isEqualTo(Document.parse("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}")); + .isEqualTo("{ \"$covariancePop\" : [\"$field1\", \"$field2\"]}"); } @Test // GH-3712 void shouldRenderCovarianceSamp() { assertThat(transform("covarianceSamp(field1, field2)")) - .isEqualTo(Document.parse("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}")); + .isEqualTo("{ \"$covarianceSamp\" : [\"$field1\", \"$field2\"]}"); } @Test // GH-3715 void shouldRenderRank() { - assertThat(transform("rank()")).isEqualTo(Document.parse("{ $rank : {} }")); + assertThat(transform("rank()")).isEqualTo("{ $rank : {} }"); } @Test // GH-3715 void shouldRenderDenseRank() { - 
assertThat(transform("denseRank()")).isEqualTo(Document.parse("{ $denseRank : {} }")); + assertThat(transform("denseRank()")).isEqualTo("{ $denseRank : {} }"); } @Test // GH-3717 void shouldRenderDocumentNumber() { - assertThat(transform("documentNumber()")).isEqualTo(Document.parse("{ $documentNumber : {} }")); + assertThat(transform("documentNumber()")).isEqualTo("{ $documentNumber : {} }"); } @Test // GH-3727 void rendersShift() { assertThat(transform("shift(quantity, 1)")) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1 } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1 } }"); } @Test // GH-3727 void rendersShiftWithDefault() { assertThat(transform("shift(quantity, 1, 'Not available')")) - .isEqualTo(Document.parse("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }")); + .isEqualTo("{ $shift: { output: \"$quantity\", by: 1, default: \"Not available\" } }"); } @Test // GH-3716 void shouldRenderDerivative() { assertThat(transform("derivative(miles, 'hour')")) - .isEqualTo(Document.parse("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }")); + .isEqualTo("{ \"$derivative\" : { input : '$miles', unit : 'hour'} }"); } @Test // GH-3721 void shouldRenderIntegral() { - assertThat(transform("integral(field)")).isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\" }}")); + assertThat(transform("integral(field)")).isEqualTo("{ \"$integral\" : { \"input\" : \"$field\" }}"); } @Test // GH-3721 void shouldRenderIntegralWithUnit() { assertThat(transform("integral(field, 'hour')")) - .isEqualTo(Document.parse("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}")); + .isEqualTo("{ \"$integral\" : { \"input\" : \"$field\", \"unit\" : \"hour\" }}"); } @Test // GH-3728 void shouldRenderSin() { - assertThat(transform("sin(angle)")).isEqualTo(Document.parse("{ \"$sin\" : \"$angle\"}")); + assertThat(transform("sin(angle)")).isEqualTo("{ \"$sin\" : \"$angle\"}"); } @Test // GH-3728 void 
shouldRenderSinh() { - assertThat(transform("sinh(angle)")).isEqualTo(Document.parse("{ \"$sinh\" : \"$angle\"}")); + assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); } @Test // GH-3710 void shouldRenderCos() { - assertThat(transform("cos(angle)")).isEqualTo(Document.parse("{ \"$cos\" : \"$angle\"}")); + assertThat(transform("cos(angle)")).isEqualTo("{ \"$cos\" : \"$angle\"}"); } @Test // GH-3710 void shouldRenderCosh() { - assertThat(transform("cosh(angle)")).isEqualTo(Document.parse("{ \"$cosh\" : \"$angle\"}")); + assertThat(transform("cosh(angle)")).isEqualTo("{ \"$cosh\" : \"$angle\"}"); } @Test // GH-3730 void shouldRenderTan() { - assertThat(transform("tan(angle)")).isEqualTo(Document.parse("{ \"$tan\" : \"$angle\"}")); + assertThat(transform("tan(angle)")).isEqualTo("{ \"$tan\" : \"$angle\"}"); } @Test // GH-3730 void shouldRenderTanh() { - assertThat(transform("tanh(angle)")).isEqualTo(Document.parse("{ \"$tanh\" : \"$angle\"}")); + assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } - private Object transform(String expression, Object... params) { + private Document transform(String expression, Object... params) { + return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); + } + + private Object transformValue(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? 
result.toString() : result); } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 9b00811a7b..6373eb663c 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `trunc` +| `abs`, `add` (+++*+++ via `plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` @@ -112,7 +112,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `type` | Convert Aggregation Operators -| `convert`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` +| `convert`, `degreesToRadians`, `toBool`, `toDate`, `toDecimal`, `toDouble`, `toInt`, `toLong`, `toObjectId`, `toString` | Object Aggregation Operators | `objectToArray`, `mergeObjects` From aca403c11240a5e2253d36e4ecba627283dd9ee7 Mon Sep 17 00:00:00 2001 From: Ryan Gibb Date: Fri, 30 Jul 2021 14:06:42 +0100 Subject: [PATCH 097/983] Fix a typo in `MongoConverter` javadoc. Original pull request: #3758. 
--- .../data/mongodb/core/convert/MongoConverter.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java index 8887a3bd03..20499d3173 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java @@ -40,13 +40,14 @@ * @author Thomas Darimont * @author Christoph Strobl * @author Mark Paluch + * @author Ryan Gibb */ public interface MongoConverter extends EntityConverter, MongoPersistentProperty, Object, Bson>, MongoWriter, EntityReader { /** - * Returns thw {@link TypeMapper} being used to write type information into {@link Document}s created with that + * Returns the {@link TypeMapper} being used to write type information into {@link Document}s created with that * converter. * * @return will never be {@literal null}. From 869b88702db52a25417da5427859f75621d78e7d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 10:15:13 +0200 Subject: [PATCH 098/983] Polishing. Fix typo in reference docs. 
See #3758 --- src/main/asciidoc/reference/aggregation-framework.adoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 6373eb663c..f23b290697 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -88,7 +88,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `abs`, `add` (+++*+++ via `plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators -| `concat`, `substr`, `toLower`, `toUpper`, `stcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` +| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators | `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne` From afef243634e4efa215755cf14b678bebe10ff92a Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 08:46:22 +0200 Subject: [PATCH 099/983] Add support for `$dateAdd` aggregation operator. Closes: #3713 Original pull request: #3748. 
--- .../core/aggregation/DateOperators.java | 144 +++++++++++++++++- .../core/spel/MethodReferenceNode.java | 1 + .../aggregation/DateOperatorsUnitTests.java | 44 ++++++ .../SpelExpressionTransformerUnitTests.java | 6 + 4 files changed, 189 insertions(+), 6 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index 15f10f7d6c..f7abf88d72 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; @@ -156,7 +157,7 @@ public static Timezone none() { * representing an Olson Timezone Identifier or UTC Offset. * * @param value the plain timezone {@link String}, a {@link Field} holding the timezone or an - * {@link AggregationExpression} resulting in the timezone. + * {@link AggregationExpression} resulting in the timezone. * @return new instance of {@link Timezone}. */ public static Timezone valueOf(Object value) { @@ -274,6 +275,41 @@ public DateOperatorFactory withTimezone(Timezone timezone) { return new DateOperatorFactory(fieldReference, expression, dateValue, timezone); } + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units). @param expression must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
+ * @since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units). @param fieldReference must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateAdd addValueOf(String fieldReference, String unit) { + return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units). @param value must not + * be {@literal null}. @param unit the unit of measure. Must not be {@literal null}. + * + * @return + * @since 3.3 new instance of {@link DateAdd}. + */ + public DateAdd add(Object value, String unit) { + return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone); + } + /** * Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and * 366. 
@@ -1480,7 +1516,6 @@ protected java.util.Map append(String key, Object value) { } else { clone.put("timezone", ((Timezone) value).value); } - } else { clone.put(key, value); } @@ -1911,7 +1946,7 @@ default T millisecondOf(AggregationExpression expression) { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ * @since 2.1 */ public static class DateFromParts extends TimezonedDateAggregationExpression implements DateParts { @@ -2086,7 +2121,7 @@ default DateFromParts yearOf(AggregationExpression expression) { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromParts/ * @since 2.1 */ public static class IsoDateFromParts extends TimezonedDateAggregationExpression @@ -2262,7 +2297,7 @@ default IsoDateFromParts isoWeekYearOf(AggregationExpression expression) { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/ + * "https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateToParts/ * @since 2.1 */ public static class DateToParts extends TimezonedDateAggregationExpression { @@ -2343,7 +2378,7 @@ protected String getMongoMethod() { * @author Matt Morrissette * @author Christoph Strobl * @see https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/ + * 
"https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/">https://docs.mongodb.com/manual/reference/operator/aggregation/dateFromString/ * @since 2.1 */ public static class DateFromString extends TimezonedDateAggregationExpression { @@ -2418,6 +2453,103 @@ protected String getMongoMethod() { } } + /** + * {@link AggregationExpression} for {@code $dateAdd}.
        + * NOTE: Requires MongoDB 5.0 or later. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DateAdd extends TimezonedDateAggregationExpression { + + private DateAdd(Object value) { + super(value); + } + + /** + * Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a + * {@link #toDate(Object) start date}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateAdd addValueOf(AggregationExpression expression, String unit) { + return addValue(expression, unit); + } + + /** + * Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}. + * + * @param fieldReference must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateAdd addValueOf(String fieldReference, String unit) { + return addValue(Fields.field(fieldReference), unit); + } + + /** + * Add the number of {@literal units} to a {@link #toDate(Object) start date}. + * + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateAdd addValue(Object value, String unit) { + + Map args = new HashMap<>(); + args.put("unit", unit); + args.put("amount", value); + return new DateAdd(args); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateAdd toDateOf(AggregationExpression expression) { + return toDate(expression); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
+ */ + public DateAdd toDateOf(String fieldReference) { + return toDate(Fields.field(fieldReference)); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateAdd toDate(Object dateExpression) { + return new DateAdd(append("startDate", dateExpression)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateAdd}. + */ + public DateAdd withTimezone(Timezone timezone) { + return new DateAdd(appendTimezone(argumentMap(), timezone)); + } + + @Override + protected String getMongoMethod() { + return "$dateAdd"; + } + } + @SuppressWarnings("unchecked") private static T applyTimezone(T instance, Timezone timezone) { return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 1efe94c757..6b4daa15b8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -144,6 +144,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("literal", singleArgRef().forOperator("$literal")); // DATE OPERATORS + map.put("dateAdd", mapArgRef().forOperator("$dateAdd").mappingParametersTo("startDate", "unit", "amount", "timezone")); map.put("dayOfYear", singleArgRef().forOperator("$dayOfYear")); map.put("dayOfMonth", singleArgRef().forOperator("$dayOfMonth")); map.put("dayOfWeek", 
singleArgRef().forOperator("$dayOfWeek")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java new file mode 100644 index 0000000000..036edfdce1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -0,0 +1,44 @@ +/* + * Copyright 2021. the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; +import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; + +/** + * @author Christoph Strobl + */ +class DateOperatorsUnitTests { + + @Test // GH-3713 + void rendersDateAdd() { + + assertThat(DateOperators.dateOf("purchaseDate").add(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + } + + @Test // GH-3713 + void rendersDateAddWithTimezone() { + + assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")).add(3, "day") + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( + "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); + } + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 193ffb520d..337d61f984 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1039,6 +1039,12 @@ private Document transform(String expression, Object... params) { } private Object transformValue(String expression, Object... params) { + @Test // GH-3713 + void shouldRenderDateAdd() { + assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + } + + private Object transform(String expression, Object... 
params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); } From fc41793d5de7bf48f551638e9836ef0eaceb1c43 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 09:08:18 +0200 Subject: [PATCH 100/983] Add support for `$dateDiff` aggregation operator. Closes: #3713 Original pull request: #3748. --- .../core/aggregation/DateOperators.java | 144 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../aggregation/DateOperatorsUnitTests.java | 16 ++ .../SpelExpressionTransformerUnitTests.java | 5 + 4 files changed, 166 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index f7abf88d72..a9b1d411cd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -340,6 +340,42 @@ public DayOfWeek dayOfWeek() { return applyTimezone(DayOfWeek.dayOfWeek(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date + * computed by the given {@link AggregationExpression expression}. @param expression must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
+ * @since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, String unit) { + return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date stored + * at the given {@literal field}. @param expression must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, String unit) { + return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date given + * {@literal value}. @param value anything the resolves to a valid date. Must not be {@literal null}. + * + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + * @since 3.3 + */ + public DateDiff diff(Object value, String unit) { + return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone); + } + /** * Creates new {@link AggregationExpression} that returns the year portion of a date. * @@ -2550,6 +2586,114 @@ protected String getMongoMethod() { } } + /** + * {@link AggregationExpression} for {@code $dateDiff}.
        + * NOTE: Requires MongoDB 5.0 or later. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DateDiff extends TimezonedDateAggregationExpression { + + private DateDiff(Object value) { + super(value); + } + + /** + * Add the number of {@literal units} of the result of the given {@link AggregationExpression expression} to a + * {@link #toDate(Object) start date}. + * + * @param expression must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateDiff diffValueOf(AggregationExpression expression, String unit) { + return diffValue(expression, unit); + } + + /** + * Add the number of {@literal units} from a {@literal field} to a {@link #toDate(Object) start date}. + * + * @param fieldReference must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateDiff diffValueOf(String fieldReference, String unit) { + return diffValue(Fields.field(fieldReference), unit); + } + + /** + * Add the number of {@literal units} to a {@link #toDate(Object) start date}. + * + * @param value must not be {@literal null}. + * @param unit must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public static DateDiff diffValue(Object value, String unit) { + + Map args = new HashMap<>(); + args.put("unit", unit); + args.put("endDate", value); + return new DateDiff(args); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateDiff toDateOf(AggregationExpression expression) { + return toDate(expression); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateAdd}. 
+ */ + public DateDiff toDateOf(String fieldReference) { + return toDate(Fields.field(fieldReference)); + } + + /** + * Define the start date, in UTC, for the addition operation. + * + * @param dateExpression anything that evaluates to a valid date. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. + */ + public DateDiff toDate(Object dateExpression) { + return new DateDiff(append("startDate", dateExpression)); + } + + /** + * Optionally set the {@link Timezone} to use. If not specified {@literal UTC} is used. + * + * @param timezone must not be {@literal null}. Consider {@link Timezone#none()} instead. + * @return new instance of {@link DateAdd}. + */ + public DateDiff withTimezone(Timezone timezone) { + return new DateDiff(appendTimezone(argumentMap(), timezone)); + } + + /** + * Set the start day of the week if the unit if measure is set to {@literal week}. Uses {@literal Sunday} by + * default. + * + * @param day must not be {@literal null}. + * @return new instance of {@link DateDiff}. 
+ */ + public DateDiff startOfWeek(Object day) { + return new DateDiff(append("startOfWeek", day)); + } + + @Override + protected String getMongoMethod() { + return "$dateDiff"; + } + } + @SuppressWarnings("unchecked") private static T applyTimezone(T instance, Timezone timezone) { return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 6b4daa15b8..6a60a7df1a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -145,6 +145,7 @@ public class MethodReferenceNode extends ExpressionNode { // DATE OPERATORS map.put("dateAdd", mapArgRef().forOperator("$dateAdd").mappingParametersTo("startDate", "unit", "amount", "timezone")); + map.put("dateDiff", mapArgRef().forOperator("$dateDiff").mappingParametersTo("startDate", "endDate", "unit","timezone", "startOfWeek")); map.put("dayOfYear", singleArgRef().forOperator("$dayOfYear")); map.put("dayOfMonth", singleArgRef().forOperator("$dayOfMonth")); map.put("dayOfWeek", singleArgRef().forOperator("$dayOfWeek")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java index 036edfdce1..ab975b852a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -40,5 +40,21 @@ void rendersDateAddWithTimezone() { 
.toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); } + + @Test // GH-3713 + void rendersDateDiff() { + + assertThat( + DateOperators.dateOf("purchaseDate").diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document + .parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + } + + @Test // GH-3713 + void rendersDateDiffWithTimezone() { + + assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")) + .diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( + "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }")); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 337d61f984..33edab1d5a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1044,6 +1044,11 @@ void shouldRenderDateAdd() { assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); } + @Test // GH-3713 + void shouldRenderDateDiff() { + assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + } + private Object transform(String expression, Object... 
params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? result.toString() : result); From 456c1ad26abb77d82595f7e1589ca02a9a780da9 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 26 Jul 2021 09:18:16 +0200 Subject: [PATCH 101/983] Add shortcut for date aggregation operators working with timezone. See: #3713 Original pull request: #3748. --- .../core/aggregation/DateOperators.java | 26 +++++++++++++++++++ .../aggregation/DateOperatorsUnitTests.java | 6 ++--- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index a9b1d411cd..a97d64c52d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -46,6 +46,19 @@ public static DateOperatorFactory dateOf(String fieldReference) { return new DateOperatorFactory(fieldReference); } + /** + * Take the date referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new DateOperatorFactory(fieldReference).withTimezone(timezone); + } + /** * Take the date resulting from the given {@link AggregationExpression}. * @@ -58,6 +71,19 @@ public static DateOperatorFactory dateOf(AggregationExpression expression) { return new DateOperatorFactory(expression); } + /** + * Take the date resulting from the given {@link AggregationExpression}. 
+ * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) { + + Assert.notNull(expression, "Expression must not be null!"); + return new DateOperatorFactory(expression).withTimezone(timezone); + } + /** * Take the given value as date. *

        diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java index ab975b852a..95f977ed73 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -35,8 +35,8 @@ void rendersDateAdd() { @Test // GH-3713 void rendersDateAddWithTimezone() { - - assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")).add(3, "day") + + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).add(3, "day") .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); } @@ -53,7 +53,7 @@ void rendersDateDiff() { @Test // GH-3713 void rendersDateDiffWithTimezone() { - assertThat(DateOperators.dateOf("purchaseDate").withTimezone(Timezone.valueOf("America/Chicago")) + assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")) .diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }")); } From 24171b3ae27a4bc867fd619420543ce1b56344f5 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 10:59:26 +0200 Subject: [PATCH 102/983] Polishing. Introduce factory methods to convert TimeZone/ZoneId/ZoneOffset into Mongo Timezone. Introduce TemporalUnit abstraction and converters to convert ChronoUnit and TimeUnit into TemporalUnit for date operators accepting a unit parameter. See #3713 Original pull request: #3748. 
--- .../core/aggregation/DateOperators.java | 294 ++++++++++++++++-- .../aggregation/SetWindowFieldsOperation.java | 63 ++++ .../aggregation/DateOperatorsUnitTests.java | 48 ++- .../SpelExpressionTransformerUnitTests.java | 11 +- .../reference/aggregation-framework.adoc | 2 +- 5 files changed, 376 insertions(+), 42 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index a97d64c52d..029b994f2e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -15,10 +15,16 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Locale; import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; import org.springframework.lang.Nullable; import org.springframework.util.Assert; @@ -157,6 +163,7 @@ public static DateFromString dateFromString(String value) { * NOTE: Support for timezones in aggregations Requires MongoDB 3.6 or later. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.1 */ public static class Timezone { @@ -192,6 +199,61 @@ public static Timezone valueOf(Object value) { return new Timezone(value); } + /** + * Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset. + * + * @param timeZone {@link TimeZone} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. 
+ * @since 3.3 + */ + public static Timezone fromOffset(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null!"); + + return fromOffset( + ZoneOffset.ofTotalSeconds(Math.toIntExact(TimeUnit.MILLISECONDS.toSeconds(timeZone.getRawOffset())))); + } + + /** + * Create a {@link Timezone} for the given {@link ZoneOffset} rendering the offset as UTC offset. + * + * @param offset {@link ZoneOffset} rendering the offset as UTC offset. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromOffset(ZoneOffset offset) { + + Assert.notNull(offset, "ZoneOffset must not be null!"); + return new Timezone(offset.toString()); + } + + /** + * Create a {@link Timezone} for the given {@link TimeZone} rendering the offset as UTC offset. + * + * @param timeZone {@link Timezone} rendering the offset as zone identifier. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(TimeZone timeZone) { + + Assert.notNull(timeZone, "TimeZone must not be null!"); + + return valueOf(timeZone.getID()); + } + + /** + * Create a {@link Timezone} for the given {@link java.time.ZoneId} rendering the offset as UTC offset. + * + * @param zoneId {@link ZoneId} rendering the offset as zone identifier. + * @return new instance of {@link Timezone}. + * @since 3.3 + */ + public static Timezone fromZone(ZoneId zoneId) { + + Assert.notNull(zoneId, "ZoneId must not be null!"); + return new Timezone(zoneId.toString()); + } + /** * Create a {@link Timezone} for the {@link Field} reference holding the Olson Timezone Identifier or UTC Offset. 
* @@ -212,6 +274,11 @@ public static Timezone ofField(String fieldReference) { public static Timezone ofExpression(AggregationExpression expression) { return valueOf(expression); } + + @Nullable + Object getValue() { + return value; + } } /** @@ -303,32 +370,64 @@ public DateOperatorFactory withTimezone(Timezone timezone) { /** * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression - * expression} (in {@literal units). @param expression must not be {@literal null}. - * + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateAdd addValueOf(AggregationExpression expression, String unit) { return applyTimezone(DateAdd.addValueOf(expression, unit).toDate(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that adds the value of the given {@link AggregationExpression + * expression} (in {@literal units}). + * + * @param expression must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateAdd addValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + return applyTimezone(DateAdd.addValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + /** * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in - * {@literal units). @param fieldReference must not be {@literal null}. - * + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. 
- * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateAdd addValueOf(String fieldReference, String unit) { return applyTimezone(DateAdd.addValueOf(fieldReference, unit).toDate(dateReference()), timezone); } /** - * Creates new {@link AggregationExpression} that adds the given value (in {@literal units). @param value must not - * be {@literal null}. @param unit the unit of measure. Must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that adds the value stored at the given {@literal field} (in + * {@literal units}). + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateAdd addValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateAdd.addValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. * @return * @since 3.3 new instance of {@link DateAdd}. */ @@ -336,6 +435,22 @@ public DateAdd add(Object value, String unit) { return applyTimezone(DateAdd.addValue(value, unit).toDate(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that adds the given value (in {@literal units}). + * + * @param value must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return + * @since 3.3 new instance of {@link DateAdd}. 
+ */ + public DateAdd add(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateAdd.addValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + /** * Creates new {@link AggregationExpression} that returns the day of the year for a date as a number between 1 and * 366. @@ -367,41 +482,89 @@ public DayOfWeek dayOfWeek() { } /** - * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date - * computed by the given {@link AggregationExpression expression}. @param expression must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateDiff diffValueOf(AggregationExpression expression, String unit) { return applyTimezone(DateDiff.diffValueOf(expression, unit).toDate(dateReference()), timezone); } /** - * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date stored - * at the given {@literal field}. @param expression must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date + * computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. 
@since 3.3 + */ + public DateDiff diffValueOf(AggregationExpression expression, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(expression, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateDiff diffValueOf(String fieldReference, String unit) { return applyTimezone(DateDiff.diffValueOf(fieldReference, unit).toDate(dateReference()), timezone); } /** - * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units) to the date given - * {@literal value}. @param value anything the resolves to a valid date. Must not be {@literal null}. - * + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date stored + * at the given {@literal field}. + * + * @param fieldReference must not be {@literal null}. * @param unit the unit of measure. Must not be {@literal null}. - * @return new instance of {@link DateAdd}. - * @since 3.3 + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diffValueOf(String fieldReference, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone( + DateDiff.diffValueOf(fieldReference, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), timezone); + } + + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given + * {@literal value}. + * + * @param value anything the resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. 
Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 */ public DateDiff diff(Object value, String unit) { return applyTimezone(DateDiff.diffValue(value, unit).toDate(dateReference()), timezone); } + /** + * Creates new {@link AggregationExpression} that calculates the difference (in {@literal units}) to the date given + * {@literal value}. + * + * @param value anything the resolves to a valid date. Must not be {@literal null}. + * @param unit the unit of measure. Must not be {@literal null}. + * @return new instance of {@link DateAdd}. @since 3.3 + */ + public DateDiff diff(Object value, TemporalUnit unit) { + + Assert.notNull(unit, "TemporalUnit must not be null"); + + return applyTimezone(DateDiff.diffValue(value, unit.name().toLowerCase(Locale.ROOT)).toDate(dateReference()), + timezone); + } + /** * Creates new {@link AggregationExpression} that returns the year portion of a date. * @@ -2720,6 +2883,85 @@ protected String getMongoMethod() { } } + /** + * Interface defining a temporal unit for date operators. + * + * @author Mark Paluch + * @since 3.3 + */ + public interface TemporalUnit { + + String name(); + + /** + * Converts the given time unit into a {@link TemporalUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. 
+ */ + static TemporalUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + switch (timeUnit) { + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLISECONDS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", timeUnit)); + } + + /** + * Converts the given chrono unit into a {@link TemporalUnit}. Supported units are: years, weeks, months, days, + * hours, minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static TemporalUnit from(ChronoUnit chronoUnit) { + + switch (chronoUnit) { + case YEARS: + return TemporalUnits.YEAR; + case WEEKS: + return TemporalUnits.WEEK; + case MONTHS: + return TemporalUnits.MONTH; + case DAYS: + return TemporalUnits.DAY; + case HOURS: + return TemporalUnits.HOUR; + case MINUTES: + return TemporalUnits.MINUTE; + case SECONDS: + return TemporalUnits.SECOND; + case MILLIS: + return TemporalUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create TemporalUnit from %s", chronoUnit)); + } + } + + /** + * Supported temporal units. 
+ */ + enum TemporalUnits implements TemporalUnit { + YEAR, QUARTER, WEEK, MONTH, DAY, HOUR, MINUTE, SECOND, MILLISECOND + + } + @SuppressWarnings("unchecked") private static T applyTimezone(T instance, Timezone timezone) { return !ObjectUtils.nullSafeEquals(Timezone.none(), timezone) && !instance.hasTimezone() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java index 9c40a0b642..fa01b02b98 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetWindowFieldsOperation.java @@ -15,9 +15,11 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.concurrent.TimeUnit; import org.bson.Document; import org.springframework.data.domain.Sort; @@ -626,7 +628,68 @@ public Document toDocument(AggregationOperationContext ctx) { * The actual time unit to apply to a {@link Window}. */ public interface WindowUnit { + String name(); + + /** + * Converts the given time unit into a {@link WindowUnit}. Supported units are: days, hours, minutes, seconds, and + * milliseconds. + * + * @param timeUnit the time unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. 
+ */ + static WindowUnit from(TimeUnit timeUnit) { + + Assert.notNull(timeUnit, "TimeUnit must not be null"); + + switch (timeUnit) { + case DAYS: + return WindowUnits.DAY; + case HOURS: + return WindowUnits.HOUR; + case MINUTES: + return WindowUnits.MINUTE; + case SECONDS: + return WindowUnits.SECOND; + case MILLISECONDS: + return WindowUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", timeUnit)); + } + + /** + * Converts the given chrono unit into a {@link WindowUnit}. Supported units are: years, weeks, months, days, hours, + * minutes, seconds, and millis. + * + * @param chronoUnit the chrono unit to convert, must not be {@literal null}. + * @return + * @throws IllegalArgumentException if the {@link TimeUnit} is {@literal null} or not supported for conversion. + */ + static WindowUnit from(ChronoUnit chronoUnit) { + + switch (chronoUnit) { + case YEARS: + return WindowUnits.YEAR; + case WEEKS: + return WindowUnits.WEEK; + case MONTHS: + return WindowUnits.MONTH; + case DAYS: + return WindowUnits.DAY; + case HOURS: + return WindowUnits.HOUR; + case MINUTES: + return WindowUnits.MINUTE; + case SECONDS: + return WindowUnits.SECOND; + case MILLIS: + return WindowUnits.MILLISECOND; + } + + throw new IllegalArgumentException(String.format("Cannot create WindowUnit from %s", chronoUnit)); + } } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java index 95f977ed73..6d63b954f8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/DateOperatorsUnitTests.java @@ -15,14 +15,22 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static 
org.assertj.core.api.Assertions.*; +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; +import java.util.TimeZone; -import org.bson.Document; import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.aggregation.DateOperators.Timezone; /** + * Unit tests for {@link DateOperators}. + * * @author Christoph Strobl + * @author Mark Paluch */ class DateOperatorsUnitTests { @@ -30,15 +38,15 @@ class DateOperatorsUnitTests { void rendersDateAdd() { assertThat(DateOperators.dateOf("purchaseDate").add(3, "day").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); } @Test // GH-3713 void rendersDateAddWithTimezone() { assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")).add(3, "day") - .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( - "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }")); + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3, timezone : \"America/Chicago\" } }"); } @Test // GH-3713 @@ -46,15 +54,37 @@ void rendersDateDiff() { assertThat( DateOperators.dateOf("purchaseDate").diffValueOf("delivered", "day").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document - .parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); } @Test // GH-3713 void rendersDateDiffWithTimezone() { assertThat(DateOperators.zonedDateOf("purchaseDate", Timezone.valueOf("America/Chicago")) - .diffValueOf("delivered", 
"day").toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(Document.parse( - "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }")); + .diffValueOf("delivered", DateOperators.TemporalUnit.from(ChronoUnit.DAYS)) + .toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo( + "{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\", timezone : \"America/Chicago\" } }"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(ZoneOffset.ofHoursMinutes(3, 30)).getValue()).isEqualTo("+03:30"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneOffset() { + assertThat(DateOperators.Timezone.fromOffset(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("-06:00"); + } + + @Test // GH-3713 + void rendersTimezoneFromTimeZoneId() { + assertThat(DateOperators.Timezone.fromZone(TimeZone.getTimeZone("America/Chicago")).getValue()) + .isEqualTo("America/Chicago"); + } + + @Test // GH-3713 + void rendersTimezoneFromZoneId() { + assertThat(DateOperators.Timezone.fromZone(ZoneId.of("America/Chicago")).getValue()).isEqualTo("America/Chicago"); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 33edab1d5a..bc8da0a3c1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1034,11 +1034,6 @@ void shouldRenderTanh() { assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } - private Document transform(String expression, Object... 
params) { - return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); - } - - private Object transformValue(String expression, Object... params) { @Test // GH-3713 void shouldRenderDateAdd() { assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); @@ -1049,7 +1044,11 @@ void shouldRenderDateDiff() { assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); } - private Object transform(String expression, Object... params) { + private Document transform(String expression, Object... params) { + return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); + } + + private Object transformValue(String expression, Object... params) { Object result = transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); return result == null ? null : (!(result instanceof org.bson.Document) ? 
result.toString() : result); } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index f23b290697..bc7a032e75 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -100,7 +100,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `literal` | Date Aggregation Operators -| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` +| `dayOfYear`, `dayOfMonth`, `dayOfWeek`, `year`, `month`, `week`, `hour`, `minute`, `second`, `millisecond`, `dateAdd`, `dateDiff`, `dateToString`, `dateFromString`, `dateFromParts`, `dateToParts`, `isoDayOfWeek`, `isoWeek`, `isoWeekYear` | Variable Operators | `map` From 92cc2a582a8b5770996f11b409f9629678c2ce8e Mon Sep 17 00:00:00 2001 From: Mushtaq Ahmed Date: Sat, 31 Jul 2021 16:52:38 +0530 Subject: [PATCH 103/983] Add support for `$rand` aggregation operator. 
Closes #3724 Original pull request: #3759 --- .../core/aggregation/ArithmeticOperators.java | 25 +++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 1 + .../ArithmeticOperatorsUnitTests.java | 5 ++++ .../SpelExpressionTransformerUnitTests.java | 5 ++++ 4 files changed, 36 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 7896486abf..9610967830 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -19,6 +19,7 @@ import java.util.List; import java.util.Locale; +import org.bson.Document; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; @@ -63,6 +64,16 @@ public static ArithmeticOperatorFactory valueOf(AggregationExpression expression return new ArithmeticOperatorFactory(expression); } + /** + * Creates new {@link AggregationExpression} that returns a random float between 0 and 1 each time it is called. + * + * @return new instance of {@link Rand}. + * @since 3.3 + */ + public static Rand rand() { + return new Rand(); + } + /** * @author Christoph Strobl */ @@ -2671,4 +2682,18 @@ protected String getMongoMethod() { return "$tanh"; } } + + /** + * {@link Rand} returns a floating value between 0 and 1. 
+ * + * @author Mushtaq Ahmed + * @since 3.3 + */ + public static class Rand implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rand", new Document()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 6a60a7df1a..a91358353c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -99,6 +99,7 @@ public class MethodReferenceNode extends ExpressionNode { map.put("cosh", singleArgRef().forOperator("$cosh")); map.put("tan", singleArgRef().forOperator("$tan")); map.put("tanh", singleArgRef().forOperator("$tanh")); + map.put("rand", emptyRef().forOperator("$rand")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 7cde7cd1c4..b589e152aa 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -166,4 +166,9 @@ void rendersTanhWithValueInDegrees() { .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } + + @Test // GH-3724 + void rendersRank() { + assertThat(rand().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rand", new Document())); + } } diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index bc8da0a3c1..cc20ffd121 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1044,6 +1044,11 @@ void shouldRenderDateDiff() { assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); } + @Test // GH-3724 + void shouldRenderRand() { + assertThat(transform("rand()")).isEqualTo(Document.parse("{ $rand : {} }")); + } + private Document transform(String expression, Object... params) { return (Document) transformer.transform(expression, Aggregation.DEFAULT_CONTEXT, params); } From 7c6e951c7c0088459ada9e6c5a7c3ad2a427fbaa Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 11:13:02 +0200 Subject: [PATCH 104/983] Polishing. Add author tags, tweak Javadoc style. Simplify tests. Document operator. See #3724 Original pull request: #3759. 
--- .../mongodb/core/aggregation/ArithmeticOperators.java | 4 +++- .../core/aggregation/ArithmeticOperatorsUnitTests.java | 4 ++-- .../aggregation/SpelExpressionTransformerUnitTests.java | 8 +++++--- src/main/asciidoc/reference/aggregation-framework.adoc | 2 +- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 9610967830..8fe3d9120c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -40,6 +40,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Mushtaq Ahmed * @since 1.10 */ public class ArithmeticOperators { @@ -65,7 +66,8 @@ public static ArithmeticOperatorFactory valueOf(AggregationExpression expression } /** - * Creates new {@link AggregationExpression} that returns a random float between 0 and 1 each time it is called. + * Creates new {@link AggregationExpression} that returns a random float between {@code 0} and {@code 1} each time it + * is called. * * @return new instance of {@link Rand}. 
* @since 3.3 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index b589e152aa..02f76d5c10 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -29,6 +29,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Mushtaq Ahmed */ class ArithmeticOperatorsUnitTests { @@ -166,9 +167,8 @@ void rendersTanhWithValueInDegrees() { .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } - @Test // GH-3724 - void rendersRank() { + void rendersRand() { assertThat(rand().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo(new Document("$rand", new Document())); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index cc20ffd121..daba7a21cd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1036,17 +1036,19 @@ void shouldRenderTanh() { @Test // GH-3713 void shouldRenderDateAdd() { - assertThat(transform("dateAdd(purchaseDate, 'day', 3)")).isEqualTo(Document.parse("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }")); + assertThat(transform("dateAdd(purchaseDate, 'day', 3)")) + .isEqualTo("{ $dateAdd: { startDate: \"$purchaseDate\", unit: \"day\", amount: 3 } }"); } @Test // GH-3713 void shouldRenderDateDiff() { 
- assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")).isEqualTo(Document.parse("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }")); + assertThat(transform("dateDiff(purchaseDate, delivered, 'day')")) + .isEqualTo("{ $dateDiff: { startDate: \"$purchaseDate\", endDate: \"$delivered\", unit: \"day\" } }"); } @Test // GH-3724 void shouldRenderRand() { - assertThat(transform("rand()")).isEqualTo(Document.parse("{ $rand : {} }")); + assertThat(transform("rand()")).isEqualTo("{ $rand : {} }"); } private Document transform(String expression, Object... params) { diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index bc7a032e75..f96719adde 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -76,7 +76,7 @@ At the time of this writing, we provide support for the following Aggregation Op [cols="2*"] |=== | Pipeline Aggregation Operators -| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `replaceRoot`, `skip`, `sort`, `unwind` +| `bucket`, `bucketAuto`, `count`, `facet`, `geoNear`, `graphLookup`, `group`, `limit`, `lookup`, `match`, `project`, `rand`, `replaceRoot`, `skip`, `sort`, `unwind` | Set Aggregation Operators | `setEquals`, `setIntersection`, `setUnion`, `setDifference`, `setIsSubset`, `anyElementTrue`, `allElementsTrue` From 302c8031f90f951fcae2e67b6d471f026f266089 Mon Sep 17 00:00:00 2001 From: sangyongchoi Date: Tue, 3 Aug 2021 23:23:59 +0900 Subject: [PATCH 105/983] Add Criteria infix functions for `maxDistance` and `minDistance`. 
Closes: #3761 --- .../core/query/TypedCriteriaExtensions.kt | 22 +++++++++ .../query/TypedCriteriaExtensionsTests.kt | 48 +++++++++++++++++++ 2 files changed, 70 insertions(+) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt index b8762ffbe1..eb1868e300 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt @@ -364,6 +364,28 @@ infix fun KProperty>.maxDistance(d: Double): Criteria = infix fun KProperty>.minDistance(d: Double): Criteria = Criteria(asString(this)).minDistance(d) +/** + * Creates a geo-spatial criterion using a $maxDistance operation, for use with $near + * + * See [MongoDB Query operator: + * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) + * @author Sangyong Choi + * @since 3.2 + * @see Criteria.maxDistance + */ +infix fun Criteria.maxDistance(d: Double): Criteria = + this.maxDistance(d) + +/** + * Creates a geospatial criterion using a $minDistance operation, for use with $near or + * $nearSphere. 
+ * @author Sangyong Choi + * @since 3.2 + * @see Criteria.minDistance + */ +infix fun Criteria.minDistance(d: Double): Criteria = + this.minDistance(d) + /** * Creates a criterion using the $elemMatch operator * diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt index 54969476d3..3b9dfc9342 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt @@ -317,6 +317,54 @@ class TypedCriteriaExtensionsTests { assertThat(typed).isEqualTo(expected) } + @Test + fun `maxDistance() should equal expected criteria with nearSphere`() { + val point = Point(0.0, 0.0) + + val typed = Building::location nearSphere point maxDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with nearSphere`() { + val point = Point(0.0, 0.0) + + val typed = Building::location nearSphere point minDistance 3.0 + val expected = Criteria("location") + .nearSphere(point) + .minDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `maxDistance() should equal expected criteria with near`() { + val point = Point(0.0, 0.0) + + val typed = Building::location near point maxDistance 3.0 + val expected = Criteria("location") + .near(point) + .maxDistance(3.0) + + assertThat(typed).isEqualTo(expected) + } + + @Test + fun `minDistance() should equal expected criteria with near`() { + val point = Point(0.0, 0.0) + + val typed = Building::location near point minDistance 3.0 + val expected = Criteria("location") + .near(point) + .minDistance(3.0) + + 
assertThat(typed).isEqualTo(expected) + } + @Test fun `elemMatch() should equal expected criteria`() { From 467536cb34162f528ecba3d494e77414bb2cb333 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 14:33:06 +0200 Subject: [PATCH 106/983] Polishing. Update since version. Reformat code. See: #3761. --- .../mongodb/core/query/TypedCriteriaExtensions.kt | 4 ++-- .../core/query/TypedCriteriaExtensionsTests.kt | 11 +++++++---- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt index eb1868e300..ab7e32fc03 100644 --- a/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt +++ b/spring-data-mongodb/src/main/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensions.kt @@ -370,7 +370,7 @@ infix fun KProperty>.minDistance(d: Double): Criteria = * See [MongoDB Query operator: * $maxDistance](https://docs.mongodb.com/manual/reference/operator/query/maxDistance/) * @author Sangyong Choi - * @since 3.2 + * @since 3.2.5 * @see Criteria.maxDistance */ infix fun Criteria.maxDistance(d: Double): Criteria = @@ -380,7 +380,7 @@ infix fun Criteria.maxDistance(d: Double): Criteria = * Creates a geospatial criterion using a $minDistance operation, for use with $near or * $nearSphere. 
* @author Sangyong Choi - * @since 3.2 + * @since 3.2.5 * @see Criteria.minDistance */ infix fun Criteria.minDistance(d: Double): Criteria = diff --git a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt index 3b9dfc9342..7a5c358fad 100644 --- a/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt +++ b/spring-data-mongodb/src/test/kotlin/org/springframework/data/mongodb/core/query/TypedCriteriaExtensionsTests.kt @@ -25,8 +25,11 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type import java.util.regex.Pattern /** + * Unit tests for [Criteria] extensions. + * * @author Tjeu Kayim * @author Mark Paluch + * @author Sangyong Choi */ class TypedCriteriaExtensionsTests { @@ -319,8 +322,8 @@ class TypedCriteriaExtensionsTests { @Test fun `maxDistance() should equal expected criteria with nearSphere`() { - val point = Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location nearSphere point maxDistance 3.0 val expected = Criteria("location") .nearSphere(point) @@ -331,8 +334,8 @@ class TypedCriteriaExtensionsTests { @Test fun `minDistance() should equal expected criteria with nearSphere`() { - val point = Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location nearSphere point minDistance 3.0 val expected = Criteria("location") .nearSphere(point) @@ -343,8 +346,8 @@ class TypedCriteriaExtensionsTests { @Test fun `maxDistance() should equal expected criteria with near`() { - val point = Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location near point maxDistance 3.0 val expected = Criteria("location") .near(point) @@ -355,8 +358,8 @@ class TypedCriteriaExtensionsTests { @Test fun `minDistance() should equal expected criteria with near`() { - val point = 
Point(0.0, 0.0) + val point = Point(0.0, 0.0) val typed = Building::location near point minDistance 3.0 val expected = Criteria("location") .near(point) From 36e2d80d71634a134ed2c6d615be217692b59e9a Mon Sep 17 00:00:00 2001 From: Ivan Volzhev Date: Sat, 21 Aug 2021 08:24:22 +0200 Subject: [PATCH 107/983] Relax requirement for GeoJsonMultiPoint construction allowing creation using a single point. Only 1 point is required per GeoJson RFC and Mongo works just fine with 1 point as well. Closes #3776 Original pull request: #3777. --- .../mongodb/core/geo/GeoJsonMultiPoint.java | 18 ++++++++++++++++-- .../data/mongodb/core/geo/GeoJsonTests.java | 16 ++++++++++++++++ 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java index f42d38e0dc..c1c80b89e8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java @@ -28,6 +28,7 @@ * {@link GeoJsonMultiPoint} is defined as list of {@link Point}s. * * @author Christoph Strobl + * @author Ivan Volzhev * @since 1.7 * @see https://geojson.org/geojson-spec.html#multipoint */ @@ -40,12 +41,12 @@ public class GeoJsonMultiPoint implements GeoJson> { /** * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s. * - * @param points points must not be {@literal null} and have at least 2 entries. + * @param points points must not be {@literal null} and have at least 1 entry. 
*/ public GeoJsonMultiPoint(List points) { Assert.notNull(points, "Points must not be null."); - Assert.isTrue(points.size() >= 2, "Minimum of 2 Points required."); + Assert.isTrue(points.size() >= 1, "Minimum of 1 Point required."); this.points = new ArrayList(points); } @@ -69,6 +70,19 @@ public GeoJsonMultiPoint(Point first, Point second, Point... others) { this.points.addAll(Arrays.asList(others)); } + /** + * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. + * + * @param point must not be {@literal null}. + */ + public GeoJsonMultiPoint(Point point) { + + Assert.notNull(point, "First point must not be null!"); + + this.points = new ArrayList(); + this.points.add(point); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java index fa7115c098..6fa053dacd 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoJsonTests.java @@ -63,6 +63,7 @@ /** * @author Christoph Strobl * @author Mark Paluch + * @author Ivan Volzhev */ @ExtendWith({ MongoClientExtension.class, SpringExtension.class }) @ContextConfiguration @@ -329,6 +330,21 @@ public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeCorrectly() { assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); } + @Test // DATAMONGO-3776 + public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPointTypeWithOnePointCorrectly() { + + DocumentWithPropertyUsingGeoJsonType obj = new DocumentWithPropertyUsingGeoJsonType(); + obj.id = "geoJsonMultiPoint"; + obj.geoJsonMultiPoint = new GeoJsonMultiPoint(new Point(0, 0)); + + template.save(obj); + + DocumentWithPropertyUsingGeoJsonType result = 
template.findOne(query(where("id").is(obj.id)), + DocumentWithPropertyUsingGeoJsonType.class); + + assertThat(result.geoJsonMultiPoint).isEqualTo(obj.geoJsonMultiPoint); + } + @Test // DATAMONGO-1137 public void shouldSaveAndRetrieveDocumentWithGeoJsonMultiPolygonTypeCorrectly() { From f71f1074455042e774030314e208950d9e570fad Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 25 Aug 2021 14:57:02 +0200 Subject: [PATCH 108/983] Polishing. Reorder methods. Add since tag. Simplify assertions. Use diamond syntax. See: #3776 Original pull request: #3777. --- .../mongodb/core/geo/GeoJsonMultiPoint.java | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java index c1c80b89e8..30af9f7293 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/geo/GeoJsonMultiPoint.java @@ -38,17 +38,31 @@ public class GeoJsonMultiPoint implements GeoJson> { private final List points; + /** + * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. + * + * @param point must not be {@literal null}. + * @since 3.2.5 + */ + public GeoJsonMultiPoint(Point point) { + + Assert.notNull(point, "Point must not be null!"); + + this.points = new ArrayList<>(); + this.points.add(point); + } + /** * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}s. * - * @param points points must not be {@literal null} and have at least 1 entry. 
+ * @param points points must not be {@literal null} and not empty */ public GeoJsonMultiPoint(List points) { - Assert.notNull(points, "Points must not be null."); - Assert.isTrue(points.size() >= 1, "Minimum of 1 Point required."); + Assert.notNull(points, "Points must not be null!"); + Assert.notEmpty(points, "Points must contain at least one point!"); - this.points = new ArrayList(points); + this.points = new ArrayList<>(points); } /** @@ -64,25 +78,12 @@ public GeoJsonMultiPoint(Point first, Point second, Point... others) { Assert.notNull(second, "Second point must not be null!"); Assert.notNull(others, "Additional points must not be null!"); - this.points = new ArrayList(); + this.points = new ArrayList<>(); this.points.add(first); this.points.add(second); this.points.addAll(Arrays.asList(others)); } - /** - * Creates a new {@link GeoJsonMultiPoint} for the given {@link Point}. - * - * @param point must not be {@literal null}. - */ - public GeoJsonMultiPoint(Point point) { - - Assert.notNull(point, "First point must not be null!"); - - this.points = new ArrayList(); - this.points.add(point); - } - /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.geo.GeoJson#getType() From 297ef9823920008245f6271acc1e1212991d89e0 Mon Sep 17 00:00:00 2001 From: divya srivastava Date: Mon, 23 Aug 2021 17:33:06 +0530 Subject: [PATCH 109/983] Add support for `$regexFind`, `$regexFindAll`, and `$regexMatch` aggregation operators. Closes #3725 Original pull request: #3781. 
--- .../core/aggregation/StringOperators.java | 438 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 3 + .../ProjectionOperationUnitTests.java | 27 ++ .../SpelExpressionTransformerUnitTests.java | 64 +++ .../aggregation/StringOperatorsUnitTests.java | 106 +++++ 5 files changed, 638 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java index 5a31f6b3fc..710c6c855e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java @@ -515,6 +515,120 @@ public RTrim rtrim(AggregationExpression expression) { private RTrim createRTrim() { return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression); } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find the document with the first match.
        + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexFind(String regex) { + return createRegexFind().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find the document with the first match.
        + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexFind(AggregationExpression expression) { + return createRegexFind().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find the document with the first match. + * + * @param regex the regular expression to apply + * @param options the options to use + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexFind(String regex,String options) { + return createRegexFind().regex(regex).options(options); + } + + private RegexFind createRegexFind() { + return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find all the documents with the match.
        + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexFindAll(String regex) { + return createRegexFindAll().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find all the documents with the match..
        + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexFindAll(AggregationExpression expression) { + return createRegexFindAll().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find all the documents with the match.. + * + * @param regex the regular expression to apply + * @param options the options to use + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexFindAll(String regex,String options) { + return createRegexFindAll().regex(regex).options(options); + } + + private RegexFindAll createRegexFindAll() { + return usesFieldRef() ? RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * regular expression to find if a match is found or not.
        + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexMatch(String regex) { + return createRegexMatch().regex(regex); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find if a match is found or not.
        + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexMatch(AggregationExpression expression) { + return createRegexMatch().regexOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find if a match is found or not. + * + * @param regex the regular expression to apply + * @param options the options to use + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexMatch(String regex,String options) { + return createRegexMatch().regex(regex).options(options); + } + + private RegexMatch createRegexMatch() { + return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression); + } private boolean usesFieldRef() { return fieldReference != null; @@ -1477,4 +1591,328 @@ protected String getMongoMethod() { return "$rtrim"; } } + + /** + * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and + * returns information on the first matched substring.
        + * NOTE: Requires MongoDB 4.0 or later. + * + */ + public static class RegexFind extends AbstractAggregationExpression { + + protected RegexFind(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$regexFind"; + } + + /** + * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public static RegexFind valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFind(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. 
+ */ + public RegexFind optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(append("options", expression)); + } + + /** + * Optional specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); + return new RegexFind(append("regex",regex)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFind(append("regex",Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(append("regex",expression)); + } + + } + + /** + * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and + * returns information on all the matched substrings.
        + * NOTE: Requires MongoDB 4.0 or later. + * + */ + public static class RegexFindAll extends AbstractAggregationExpression { + + protected RegexFindAll(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$regexFindAll"; + } + + /** + * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public static RegexFindAll valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFindAll(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. 
+ */ + public RegexFindAll optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFindAll(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(append("options", expression)); + } + + /** + * Optional specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); + return new RegexFindAll(append("regex",regex)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFindAll(append("regex",Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(append("regex",expression)); + } + + } + + /** + * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and + * returns a boolean that indicates if a match is found or not.
        + * NOTE: Requires MongoDB 4.0 or later. + * + */ + public static class RegexMatch extends AbstractAggregationExpression { + + protected RegexMatch(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$regexMatch"; + } + + /** + * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input} + * value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public static RegexMatch valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(Collections.singletonMap("input", expression)); + } + + /** + * Optional specify the options to use with the regular expression. + * + * @param options must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexMatch(append("options", options)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. 
+ */ + public RegexMatch optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(append("options", Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(append("options", expression)); + } + + /** + * Optional specify the regular expression to apply. + * + * @param regex must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); + return new RegexMatch(append("regex",regex)); + } + + /** + * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch regexOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(append("regex",Fields.field(fieldReference))); + } + + /** + * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link RegexMatch}. 
+ */ + public RegexMatch regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(append("regex",expression)); + } + + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index a91358353c..0fbfe51f09 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -116,6 +116,9 @@ public class MethodReferenceNode extends ExpressionNode { map.put("trim", mapArgRef().forOperator("$trim").mappingParametersTo("input", "chars")); map.put("ltrim", mapArgRef().forOperator("$ltrim").mappingParametersTo("input", "chars")); map.put("rtrim", mapArgRef().forOperator("$rtrim").mappingParametersTo("input", "chars")); + map.put("regexFind", mapArgRef().forOperator("$regexFind").mappingParametersTo("input", "regex" , "options")); + map.put("regexFindAll", mapArgRef().forOperator("$regexFindAll").mappingParametersTo("input", "regex" , "options")); + map.put("regexMatch", mapArgRef().forOperator("$regexMatch").mappingParametersTo("input", "regex" , "options")); // TEXT SEARCH OPERATORS map.put("meta", singleArgRef().forOperator("$meta")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java index 805fc10f38..ff6771d9f1 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java @@ -1766,6 +1766,33 @@ 
public void shouldRenderSubstrCPCorrectly() { assertThat(agg) .isEqualTo(Document.parse("{ $project : { yearSubstring: { $substrCP: [ \"$quarter\", 0, 2 ] } } }")); } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFind("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFind: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexFindAll("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFindAll: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchCorrectly() { + + Document agg = project().and(StringOperators.valueOf("field1").regexMatch("e")).as("regex") + .toDocument(Aggregation.DEFAULT_CONTEXT); + + assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexMatch: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); + } @Test // DATAMONGO-1548 public void shouldRenderIndexOfArrayCorrectly() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index daba7a21cd..41b0323636 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -800,6 +800,70 @@ void shouldRenderRtrimWithCharsFromFieldReference() { 
assertThat(transform("rtrim(field1, field2)")) .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindWithoutOptions() { + + assertThat(transform("regexFind(field1,'e')")) + .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindWithOptions() { + + assertThat(transform("regexFind(field1,'e','i')")) + .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindWithOptionsFromFieldReference() { + + assertThat(transform("regexFind(field1,'e',field2)")) + .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindAllWithoutOptions() { + + assertThat(transform("regexFindAll(field1,'e')")) + .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindAllWithOptions() { + + assertThat(transform("regexFindAll(field1,'e','i')")) + .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexFindAllWithOptionsFromFieldReference() { + + assertThat(transform("regexFindAll(field1,'e',field2)")) + .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexMatchWithoutOptions() { + + assertThat(transform("regexMatch(field1,'e')")) + .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + } + + @Test // DATAMONGO-3725 + public 
void shouldRenderRegexMatchWithOptions() { + + assertThat(transform("regexMatch(field1,'e','i')")) + .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + } + + @Test // DATAMONGO-3725 + public void shouldRenderRegexMatchWithOptionsFromFieldReference() { + + assertThat(transform("regexMatch(field1,'e',field2)")) + .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + } + @Test // DATAMONGO-2077 void shouldRenderConvertWithoutOptionalParameters() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java index 0dbe362ae4..cdd0b38dbc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java @@ -144,5 +144,111 @@ public void shouldRenderRTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").rtrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAll() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } 
")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindAllWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatch() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void 
shouldRenderRegexMatchWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexMatchWithOptionsExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFind() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindForExpression() { + + assertThat(StringOperators.valueOf(EXPRESSION).regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindForRegexExpression() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + } + + @Test // DATAMONGO - 3725 + public void shouldRenderRegexFindWithOptionsExpression() { + + 
assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + } + } From 69b582823a53cf3a1b8a07354da942bd94432f4f Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Thu, 26 Aug 2021 12:20:11 +0200 Subject: [PATCH 110/983] Polishing. Add support for Pattern. Extract Regex flags translation from Criteria into RegexFlags utility class. Add since and author tags. Simplify tests. Update reference documentation. See #3725. Original pull request: #3781. --- .../core/aggregation/StringOperators.java | 371 ++++++++++----- .../data/mongodb/core/query/Criteria.java | 58 +-- .../data/mongodb/util/RegexFlags.java | 116 +++++ .../ProjectionOperationUnitTests.java | 433 +++++++++--------- .../SpelExpressionTransformerUnitTests.java | 89 ++-- .../aggregation/StringOperatorsUnitTests.java | 226 +++++---- .../reference/aggregation-framework.adoc | 2 +- 7 files changed, 776 insertions(+), 519 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java index 710c6c855e..8b6bb03875 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/StringOperators.java @@ -18,8 +18,11 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.regex.Pattern; import org.springframework.data.domain.Range; +import org.springframework.data.mongodb.util.RegexFlags; import org.springframework.util.Assert; /** @@ -27,6 
+30,7 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava * @since 1.10 */ public class StringOperators { @@ -515,117 +519,170 @@ public RTrim rtrim(AggregationExpression expression) { private RTrim createRTrim() { return usesFieldRef() ? RTrim.valueOf(fieldReference) : RTrim.valueOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given * regular expression to find the document with the first match.
        * NOTE: Requires MongoDB 4.0 or later. * + * @param regex must not be {@literal null}. * @return new instance of {@link RegexFind}. + * @since 3.3 */ public RegexFind regexFind(String regex) { return createRegexFind().regex(regex); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular - * expression resulting from the given {@link AggregationExpression} to find the document with the first match.
        + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find the document with the first + * match.
        * NOTE: Requires MongoDB 4.0 or later. * + * @param expression must not be {@literal null}. * @return new instance of {@link RegexFind}. + * @since 3.3 */ public RegexFind regexFind(AggregationExpression expression) { return createRegexFind().regexOf(expression); } - + + /** + * Creates new {@link AggregationExpression} that takes the {@link Pattern} and applies the regular expression with + * the options specified in the argument to find the document with the first match. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexFind}. + * @since 3.3 + */ + public RegexFind regexFind(Pattern pattern) { + return createRegexFind().pattern(pattern); + } + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular * expression with the options specified in the argument to find the document with the first match. * - * @param regex the regular expression to apply - * @param options the options to use + * @param regex the regular expression to apply. + * @param options the options to use. * @return new instance of {@link RegexFind}. + * @since 3.3 */ - public RegexFind regexFind(String regex,String options) { + public RegexFind regexFind(String regex, String options) { return createRegexFind().regex(regex).options(options); } - + private RegexFind createRegexFind() { return usesFieldRef() ? RegexFind.valueOf(fieldReference) : RegexFind.valueOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given * regular expression to find all the documents with the match.
        * NOTE: Requires MongoDB 4.0 or later. * + * @param regex must not be {@literal null}. * @return new instance of {@link RegexFindAll}. + * @since 3.3 */ public RegexFindAll regexFindAll(String regex) { return createRegexFindAll().regex(regex); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular - * expression resulting from the given {@link AggregationExpression} to find all the documents with the match..
        + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression resulting from the given {@link AggregationExpression} to find all the documents with the + * match..
        * NOTE: Requires MongoDB 4.0 or later. * + * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. + * @since 3.3 */ public RegexFindAll regexFindAll(AggregationExpression expression) { return createRegexFindAll().regexOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular - * expression with the options specified in the argument to find all the documents with the match.. + * Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with + * the options specified in the argument to find all the documents with the match. * - * @param regex the regular expression to apply - * @param options the options to use + * @param pattern the pattern object to apply. * @return new instance of {@link RegexFindAll}. + * @since 3.3 */ - public RegexFindAll regexFindAll(String regex,String options) { + public RegexFindAll regexFindAll(Pattern pattern) { + return createRegexFindAll().pattern(pattern); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * expression with the options specified in the argument to find all the documents with the match. + * + * @param regex the regular expression to apply. + * @param options the options to use. + * @return new instance of {@link RegexFindAll}. + * @since 3.3 + */ + public RegexFindAll regexFindAll(String regex, String options) { return createRegexFindAll().regex(regex).options(options); } - + private RegexFindAll createRegexFindAll() { return usesFieldRef() ? 
RegexFindAll.valueOf(fieldReference) : RegexFindAll.valueOf(expression); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the given * regular expression to find if a match is found or not.
        * NOTE: Requires MongoDB 4.0 or later. * + * @param regex must not be {@literal null}. * @return new instance of {@link RegexMatch}. + * @since 3.3 */ public RegexMatch regexMatch(String regex) { return createRegexMatch().regex(regex); } - + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular * expression resulting from the given {@link AggregationExpression} to find if a match is found or not.
        * NOTE: Requires MongoDB 4.0 or later. * + * @param expression must not be {@literal null}. * @return new instance of {@link RegexMatch}. + * @since 3.3 */ public RegexMatch regexMatch(AggregationExpression expression) { return createRegexMatch().regexOf(expression); } - + + /** + * Creates new {@link AggregationExpression} that takes a {@link Pattern} and applies the regular expression with + * the options specified in the argument to find if a match is found or not. + * + * @param pattern the pattern object to apply. + * @return new instance of {@link RegexMatch}. + * @since 3.3 + */ + public RegexMatch regexMatch(Pattern pattern) { + return createRegexMatch().pattern(pattern); + } + /** - * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular + * Creates new {@link AggregationExpression} that takes the associated string representation and applies the regular * expression with the options specified in the argument to find if a match is found or not. * - * @param regex the regular expression to apply - * @param options the options to use + * @param regex the regular expression to apply. + * @param options the options to use. * @return new instance of {@link RegexMatch}. + * @since 3.3 */ - public RegexMatch regexMatch(String regex,String options) { + public RegexMatch regexMatch(String regex, String options) { return createRegexMatch().regex(regex).options(options); } - + private RegexMatch createRegexMatch() { return usesFieldRef() ? RegexMatch.valueOf(fieldReference) : RegexMatch.valueOf(expression); } @@ -1591,35 +1648,35 @@ protected String getMongoMethod() { return "$rtrim"; } } - + /** - * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and + * {@link AggregationExpression} for {@code $regexFind} which applies a regular expression (regex) to a string and * returns information on the first matched substring.
        * NOTE: Requires MongoDB 4.0 or later. * + * @author Divya Srivastava + * @since 3.3 */ public static class RegexFind extends AbstractAggregationExpression { - + protected RegexFind(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$regexFind"; - } - /** - * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * Creates new {@link RegexFind} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public static RegexFind valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(Collections.singletonMap("input", Fields.field(fieldReference))); } - + /** * Creates new {@link RegexFind} using the result of the provided {@link AggregationExpression} as {@literal input} * value. @@ -1628,10 +1685,12 @@ public static RegexFind valueOf(String fieldReference) { * @return new instance of {@link RegexFind}. */ public static RegexFind valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(Collections.singletonMap("input", expression)); } - + /** * Optional specify the options to use with the regular expression. * @@ -1639,72 +1698,108 @@ public static RegexFind valueOf(AggregationExpression expression) { * @return new instance of {@link RegexFind}. */ public RegexFind options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFind(append("options", options)); } - + /** - * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. 
* * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexFind(append("options", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFind(append("options", expression)); } - + /** - * Optional specify the regular expression to apply. + * Specify the regular expression to apply. * * @param regex must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); - return new RegexFind(append("regex",regex)); + + return new RegexFind(append("regex", regex)); } - + /** - * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFind}. + */ + public RegexFind pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null!"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFind(regex); + } + + /** + * Specify the reference to the {@link Field field} holding the regular expression to apply. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFind}. 
*/ public RegexFind regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); - return new RegexFind(append("regex",Fields.field(fieldReference))); + + return new RegexFind(append("regex", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFind}. */ public RegexFind regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new RegexFind(append("regex",expression)); + + return new RegexFind(append("regex", expression)); } + @Override + protected String getMongoMethod() { + return "$regexFind"; + } } - + /** - * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and + * {@link AggregationExpression} for {@code $regexFindAll} which applies a regular expression (regex) to a string and * returns information on all the matched substrings.
        * NOTE: Requires MongoDB 4.0 or later. * + * @author Divya Srivastava + * @since 3.3 */ public static class RegexFindAll extends AbstractAggregationExpression { @@ -1712,13 +1807,9 @@ protected RegexFindAll(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$regexFindAll"; - } - /** - * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * Creates new {@link RegexFindAll} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFindAll}. @@ -1727,19 +1818,21 @@ public static RegexFindAll valueOf(String fieldReference) { Assert.notNull(fieldReference, "FieldReference must not be null!"); return new RegexFindAll(Collections.singletonMap("input", Fields.field(fieldReference))); } - + /** - * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as {@literal input} - * value. + * Creates new {@link RegexFindAll} using the result of the provided {@link AggregationExpression} as + * {@literal input} value. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public static RegexFindAll valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(Collections.singletonMap("input", expression)); } - + /** * Optional specify the options to use with the regular expression. * @@ -1747,72 +1840,108 @@ public static RegexFindAll valueOf(AggregationExpression expression) { * @return new instance of {@link RegexFindAll}. 
*/ public RegexFindAll options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexFindAll(append("options", options)); } - + /** - * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. + * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); + return new RegexFindAll(append("options", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexFindAll(append("options", expression)); } - + /** - * Optional specify the regular expression to apply. + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexFindAll}. + */ + public RegexFindAll pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null!"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexFindAll(regex); + } + + /** + * Specify the regular expression to apply. * * @param regex must not be {@literal null}. * @return new instance of {@link RegexFindAll}. 
*/ public RegexFindAll regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); - return new RegexFindAll(append("regex",regex)); + + return new RegexFindAll(append("regex", regex)); } - + /** - * Optional specify the reference to the {@link Field field} holding the regular expression to apply. + * Specify the reference to the {@link Field field} holding the regular expression to apply. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll regexOf(String fieldReference) { + Assert.notNull(fieldReference, "fieldReference must not be null!"); - return new RegexFindAll(append("regex",Fields.field(fieldReference))); + + return new RegexFindAll(append("regex", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. + * Specify the {@link AggregationExpression} evaluating to the regular expression to apply. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexFindAll}. */ public RegexFindAll regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new RegexFindAll(append("regex",expression)); + + return new RegexFindAll(append("regex", expression)); } + @Override + protected String getMongoMethod() { + return "$regexFindAll"; + } } - + /** - * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and + * {@link AggregationExpression} for {@code $regexMatch} which applies a regular expression (regex) to a string and * returns a boolean that indicates if a match is found or not.
        * NOTE: Requires MongoDB 4.0 or later. * + * @author Divya Srivastava + * @since 3.3 */ public static class RegexMatch extends AbstractAggregationExpression { @@ -1820,22 +1949,20 @@ protected RegexMatch(Object value) { super(value); } - @Override - protected String getMongoMethod() { - return "$regexMatch"; - } - /** - * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} value. + * Creates new {@link RegexMatch} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public static RegexMatch valueOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(Collections.singletonMap("input", Fields.field(fieldReference))); } - + /** * Creates new {@link RegexMatch} using the result of the provided {@link AggregationExpression} as {@literal input} * value. @@ -1844,10 +1971,12 @@ public static RegexMatch valueOf(String fieldReference) { * @return new instance of {@link RegexMatch}. */ public static RegexMatch valueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(Collections.singletonMap("input", expression)); } - + /** * Optional specify the options to use with the regular expression. * @@ -1855,54 +1984,82 @@ public static RegexMatch valueOf(AggregationExpression expression) { * @return new instance of {@link RegexMatch}. */ public RegexMatch options(String options) { + Assert.notNull(options, "Options must not be null!"); + return new RegexMatch(append("options", options)); } - + /** - * Optional specify the reference to the {@link Field field} holding the options values to use with the regular expression. 
+ * Optional specify the reference to the {@link Field field} holding the options values to use with the regular + * expression. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public RegexMatch optionsOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new RegexMatch(append("options", Fields.field(fieldReference))); } - + /** - * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular expression. + * Optional specify the {@link AggregationExpression} evaluating to the options values to use with the regular + * expression. * * @param expression must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public RegexMatch optionsOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new RegexMatch(append("options", expression)); } - + /** - * Optional specify the regular expression to apply. + * Apply a {@link Pattern} into {@code regex} and {@code options} fields. + * + * @param pattern must not be {@literal null}. + * @return new instance of {@link RegexMatch}. + */ + public RegexMatch pattern(Pattern pattern) { + + Assert.notNull(pattern, "Pattern must not be null!"); + + Map regex = append("regex", pattern.pattern()); + regex.put("options", RegexFlags.toRegexOptions(pattern.flags())); + + return new RegexMatch(regex); + } + + /** + * Specify the regular expression to apply. * * @param regex must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public RegexMatch regex(String regex) { + Assert.notNull(regex, "Regex must not be null!"); - return new RegexMatch(append("regex",regex)); + + return new RegexMatch(append("regex", regex)); } - + /** - * Optional specify the reference to the {@link Field field} holding the regular expression to apply. 
+ * Specify the reference to the {@link Field field} holding the regular expression to apply. * * @param fieldReference must not be {@literal null}. * @return new instance of {@link RegexMatch}. */ public RegexMatch regexOf(String fieldReference) { + Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new RegexMatch(append("regex",Fields.field(fieldReference))); + + return new RegexMatch(append("regex", Fields.field(fieldReference))); } - + /** * Optional specify the {@link AggregationExpression} evaluating to the regular expression to apply. * @@ -1910,9 +2067,15 @@ public RegexMatch regexOf(String fieldReference) { * @return new instance of {@link RegexMatch}. */ public RegexMatch regexOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new RegexMatch(append("regex",expression)); + + return new RegexMatch(append("regex", expression)); } + @Override + protected String getMongoMethod() { + return "$regexMatch"; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index 9b1e8df940..f9a354c38f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -42,6 +42,7 @@ import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.util.RegexFlags; import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.Base64Utils; @@ -71,20 +72,6 @@ public class Criteria implements CriteriaDefinition { */ private static final Object NOT_SET = new Object(); - 
private static final int[] FLAG_LOOKUP = new int[Character.MAX_VALUE]; - - static { - FLAG_LOOKUP['g'] = 256; - FLAG_LOOKUP['i'] = Pattern.CASE_INSENSITIVE; - FLAG_LOOKUP['m'] = Pattern.MULTILINE; - FLAG_LOOKUP['s'] = Pattern.DOTALL; - FLAG_LOOKUP['c'] = Pattern.CANON_EQ; - FLAG_LOOKUP['x'] = Pattern.COMMENTS; - FLAG_LOOKUP['d'] = Pattern.UNIX_LINES; - FLAG_LOOKUP['t'] = Pattern.LITERAL; - FLAG_LOOKUP['u'] = Pattern.UNICODE_CASE; - } - private @Nullable String key; private List criteriaChain; private LinkedHashMap criteria = new LinkedHashMap(); @@ -530,7 +517,7 @@ private Pattern toPattern(String regex, @Nullable String options) { Assert.notNull(regex, "Regex string must not be null!"); - return Pattern.compile(regex, regexFlags(options)); + return Pattern.compile(regex, RegexFlags.toRegexFlags(options)); } /** @@ -1099,47 +1086,6 @@ private static boolean requiresGeoJsonFormat(Object value) { || (value instanceof GeoCommand && ((GeoCommand) value).getShape() instanceof GeoJson); } - /** - * Lookup the MongoDB specific flags for a given regex option string. - * - * @param s the Regex option/flag to look up. Can be {@literal null}. - * @return zero if given {@link String} is {@literal null} or empty. - * @since 2.2 - */ - private static int regexFlags(@Nullable String s) { - - int flags = 0; - - if (s == null) { - return flags; - } - - for (final char f : s.toLowerCase().toCharArray()) { - flags |= regexFlag(f); - } - - return flags; - } - - /** - * Lookup the MongoDB specific flags for a given character. - * - * @param c the Regex option/flag to look up. 
- * @return - * @throws IllegalArgumentException for unknown flags - * @since 2.2 - */ - private static int regexFlag(char c) { - - int flag = FLAG_LOOKUP[c]; - - if (flag == 0) { - throw new IllegalArgumentException(String.format("Unrecognized flag [%c]", c)); - } - - return flag; - } - /** * MongoDB specific bitwise query * operators like {@code $bitsAllClear, $bitsAllSet,...} for usage with {@link Criteria#bits()} and {@link Query}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java new file mode 100644 index 0000000000..dfee94954c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java @@ -0,0 +1,116 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.util.regex.Pattern; + +import org.springframework.lang.Nullable; + +/** + * Utility to translate {@link Pattern#flags() regex flags} to MongoDB regex options and vice versa. 
+ * + * @author Mark Paluch + * @since 3.3 + */ +public abstract class RegexFlags { + + private static final int[] FLAG_LOOKUP = new int[Character.MAX_VALUE]; + + static { + FLAG_LOOKUP['g'] = 256; + FLAG_LOOKUP['i'] = Pattern.CASE_INSENSITIVE; + FLAG_LOOKUP['m'] = Pattern.MULTILINE; + FLAG_LOOKUP['s'] = Pattern.DOTALL; + FLAG_LOOKUP['c'] = Pattern.CANON_EQ; + FLAG_LOOKUP['x'] = Pattern.COMMENTS; + FLAG_LOOKUP['d'] = Pattern.UNIX_LINES; + FLAG_LOOKUP['t'] = Pattern.LITERAL; + FLAG_LOOKUP['u'] = Pattern.UNICODE_CASE; + } + + private RegexFlags() { + + } + + /** + * Lookup the MongoDB specific options from given {@link Pattern#flags() flags}. + * + * @param flags the Regex flags to look up. + * @return the options string. May be empty. + */ + public static String toRegexOptions(int flags) { + + if (flags == 0) { + return ""; + } + + StringBuilder buf = new StringBuilder(); + + for (int i = 'a'; i < 'z'; i++) { + + if (FLAG_LOOKUP[i] == 0) { + continue; + } + + if ((flags & FLAG_LOOKUP[i]) > 0) { + buf.append((char) i); + } + } + + return buf.toString(); + } + + /** + * Lookup the MongoDB specific flags for a given regex option string. + * + * @param s the Regex option/flag to look up. Can be {@literal null}. + * @return zero if given {@link String} is {@literal null} or empty. + * @since 2.2 + */ + public static int toRegexFlags(@Nullable String s) { + + int flags = 0; + + if (s == null) { + return flags; + } + + for (char f : s.toLowerCase().toCharArray()) { + flags |= toRegexFlag(f); + } + + return flags; + } + + /** + * Lookup the MongoDB specific flags for a given character. + * + * @param c the Regex option/flag to look up. 
+ * @return + * @throws IllegalArgumentException for unknown flags + * @since 2.2 + */ + public static int toRegexFlag(char c) { + + int flag = FLAG_LOOKUP[c]; + + if (flag == 0) { + throw new IllegalArgumentException(String.format("Unrecognized flag [%c]", c)); + } + + return flag; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java index ff6771d9f1..9ef207c9ad 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ProjectionOperationUnitTests.java @@ -55,24 +55,25 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Divya Srivastava * @author Mark Paluch */ public class ProjectionOperationUnitTests { - static final String MOD = "$mod"; - static final String ADD = "$add"; - static final String SUBTRACT = "$subtract"; - static final String MULTIPLY = "$multiply"; - static final String DIVIDE = "$divide"; - static final String PROJECT = "$project"; + private static final String MOD = "$mod"; + private static final String ADD = "$add"; + private static final String SUBTRACT = "$subtract"; + private static final String MULTIPLY = "$multiply"; + private static final String DIVIDE = "$divide"; + private static final String PROJECT = "$project"; @Test // DATAMONGO-586 - public void rejectsNullFields() { + void rejectsNullFields() { assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation((Fields) null)); } @Test // DATAMONGO-586 - public void declaresBackReferenceCorrectly() { + void declaresBackReferenceCorrectly() { ProjectionOperation operation = new ProjectionOperation(); operation = operation.and("prop").previousOperation(); @@ -83,7 +84,7 @@ 
public void declaresBackReferenceCorrectly() { } @Test // DATAMONGO-586 - public void alwaysUsesExplicitReference() { + void alwaysUsesExplicitReference() { ProjectionOperation operation = new ProjectionOperation(Fields.fields("foo").and("bar", "foobar")); @@ -95,7 +96,7 @@ public void alwaysUsesExplicitReference() { } @Test // DATAMONGO-586 - public void aliasesSimpleFieldProjection() { + void aliasesSimpleFieldProjection() { ProjectionOperation operation = new ProjectionOperation(); @@ -106,7 +107,7 @@ public void aliasesSimpleFieldProjection() { } @Test // DATAMONGO-586 - public void aliasesArithmeticProjection() { + void aliasesArithmeticProjection() { ProjectionOperation operation = new ProjectionOperation(); @@ -121,7 +122,7 @@ public void aliasesArithmeticProjection() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationWithoutAlias() { + void arithmeticProjectionOperationWithoutAlias() { String fieldName = "a"; ProjectionOperationBuilder operation = new ProjectionOperation().and(fieldName).plus(1); @@ -134,7 +135,7 @@ public void arithmeticProjectionOperationWithoutAlias() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationPlus() { + void arithmeticProjectionOperationPlus() { String fieldName = "a"; String fieldAlias = "b"; @@ -148,7 +149,7 @@ public void arithmeticProjectionOperationPlus() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMinus() { + void arithmeticProjectionOperationMinus() { String fieldName = "a"; String fieldAlias = "b"; @@ -162,7 +163,7 @@ public void arithmeticProjectionOperationMinus() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMultiply() { + void arithmeticProjectionOperationMultiply() { String fieldName = "a"; String fieldAlias = "b"; @@ -176,7 +177,7 @@ public void arithmeticProjectionOperationMultiply() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationDivide() { + void arithmeticProjectionOperationDivide() { String fieldName = 
"a"; String fieldAlias = "b"; @@ -190,12 +191,12 @@ public void arithmeticProjectionOperationDivide() { } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationDivideByZeroException() { + void arithmeticProjectionOperationDivideByZeroException() { assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").divide(0)); } @Test // DATAMONGO-586 - public void arithmeticProjectionOperationMod() { + void arithmeticProjectionOperationMod() { String fieldName = "a"; String fieldAlias = "b"; @@ -209,7 +210,7 @@ public void arithmeticProjectionOperationMod() { } @Test // DATAMONGO-758, DATAMONGO-1893 - public void excludeShouldAllowExclusionOfFieldsOtherThanUnderscoreId/* since MongoDB 3.4 */() { + void excludeShouldAllowExclusionOfFieldsOtherThanUnderscoreId/* since MongoDB 3.4 */() { ProjectionOperation projectionOp = new ProjectionOperation().andExclude("foo"); Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -220,7 +221,7 @@ public void arithmeticProjectionOperationMod() { } @Test // DATAMONGO-1893 - public void includeShouldNotInheritFields() { + void includeShouldNotInheritFields() { ProjectionOperation projectionOp = new ProjectionOperation().andInclude("foo"); @@ -228,7 +229,7 @@ public void includeShouldNotInheritFields() { } @Test // DATAMONGO-758 - public void excludeShouldAllowExclusionOfUnderscoreId() { + void excludeShouldAllowExclusionOfUnderscoreId() { ProjectionOperation projectionOp = new ProjectionOperation().andExclude(Fields.UNDERSCORE_ID); Document document = projectionOp.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -237,7 +238,7 @@ public void excludeShouldAllowExclusionOfUnderscoreId() { } @Test // DATAMONGO-1906 - public void rendersConditionalProjectionCorrectly() { + void rendersConditionalProjectionCorrectly() { TypedAggregation aggregation = Aggregation.newAggregation(Book.class, Aggregation.project("title") @@ -252,7 +253,7 @@ public void rendersConditionalProjectionCorrectly() 
{ } @Test // DATAMONGO-757 - public void usesImplictAndExplicitFieldAliasAndIncludeExclude() { + void usesImplictAndExplicitFieldAliasAndIncludeExclude() { ProjectionOperation operation = Aggregation.project("foo").and("foobar").as("bar").andInclude("inc1", "inc2") .andExclude("_id"); @@ -268,12 +269,12 @@ public void usesImplictAndExplicitFieldAliasAndIncludeExclude() { } @Test - public void arithmeticProjectionOperationModByZeroException() { + void arithmeticProjectionOperationModByZeroException() { assertThatIllegalArgumentException().isThrownBy(() -> new ProjectionOperation().and("a").mod(0)); } @Test // DATAMONGO-769 - public void allowArithmeticOperationsWithFieldReferences() { + void allowArithmeticOperationsWithFieldReferences() { ProjectionOperation operation = Aggregation.project() // .and("foo").plus("bar").as("fooPlusBar") // @@ -298,7 +299,7 @@ public void allowArithmeticOperationsWithFieldReferences() { } @Test // DATAMONGO-774 - public void projectionExpressions() { + void projectionExpressions() { ProjectionOperation operation = Aggregation.project() // .andExpression("(netPrice + surCharge) * taxrate * [0]", 2).as("grossSalesPrice") // @@ -310,7 +311,7 @@ public void projectionExpressions() { } @Test // DATAMONGO-975 - public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { + void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorrectly() { ProjectionOperation operation = Aggregation.project() // .and("date").extractHour().as("hour") // @@ -343,7 +344,7 @@ public void shouldRenderDateTimeFragmentExtractionsForSimpleFieldProjectionsCorr } @Test // DATAMONGO-975 - public void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { + void shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project() // .andExpression("date + 86400000") // @@ -360,7 +361,7 @@ public void 
shouldRenderDateTimeFragmentExtractionsForExpressionProjectionsCorre } @Test // DATAMONGO-979 - public void shouldRenderSizeExpressionInProjection() { + void shouldRenderSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // @@ -375,7 +376,7 @@ public void shouldRenderSizeExpressionInProjection() { } @Test // DATAMONGO-979 - public void shouldRenderGenericSizeExpressionInProjection() { + void shouldRenderGenericSizeExpressionInProjection() { ProjectionOperation operation = Aggregation // .project() // @@ -389,7 +390,7 @@ public void shouldRenderGenericSizeExpressionInProjection() { } @Test // DATAMONGO-1457 - public void shouldRenderSliceCorrectly() throws Exception { + void shouldRenderSliceCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project().and("field").slice(10).as("renamed"); @@ -400,7 +401,7 @@ public void shouldRenderSliceCorrectly() throws Exception { } @Test // DATAMONGO-1457 - public void shouldRenderSliceWithPositionCorrectly() throws Exception { + void shouldRenderSliceWithPositionCorrectly() throws Exception { ProjectionOperation operation = Aggregation.project().and("field").slice(10, 5).as("renamed"); @@ -411,7 +412,7 @@ public void shouldRenderSliceWithPositionCorrectly() throws Exception { } @Test // DATAMONGO-784 - public void shouldRenderCmpCorrectly() { + void shouldRenderCmpCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").cmp(10).as("cmp10"); @@ -420,7 +421,7 @@ public void shouldRenderCmpCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderEqCorrectly() { + void shouldRenderEqCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").eq(10).as("eq10"); @@ -429,7 +430,7 @@ public void shouldRenderEqCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderGtCorrectly() { + void shouldRenderGtCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").gt(10).as("gt10"); @@ -438,7 
+439,7 @@ public void shouldRenderGtCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderGteCorrectly() { + void shouldRenderGteCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").gte(10).as("gte10"); @@ -447,7 +448,7 @@ public void shouldRenderGteCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderLtCorrectly() { + void shouldRenderLtCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").lt(10).as("lt10"); @@ -456,7 +457,7 @@ public void shouldRenderLtCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderLteCorrectly() { + void shouldRenderLteCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").lte(10).as("lte10"); @@ -465,7 +466,7 @@ public void shouldRenderLteCorrectly() { } @Test // DATAMONGO-784 - public void shouldRenderNeCorrectly() { + void shouldRenderNeCorrectly() { ProjectionOperation operation = Aggregation.project().and("field").ne(10).as("ne10"); @@ -474,7 +475,7 @@ public void shouldRenderNeCorrectly() { } @Test // DATAMONGO-1536 - public void shouldRenderSetEquals() { + void shouldRenderSetEquals() { Document agg = project("A", "B").and("A").equalsArrays("B").as("sameElements") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -484,7 +485,7 @@ public void shouldRenderSetEquals() { } @Test // DATAMONGO-1536 - public void shouldRenderSetEqualsAggregationExpresssion() { + void shouldRenderSetEqualsAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isEqualTo("B")).as("sameElements") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -494,7 +495,7 @@ public void shouldRenderSetEqualsAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIntersection() { + void shouldRenderSetIntersection() { Document agg = project("A", "B").and("A").intersectsArrays("B").as("commonToBoth") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -504,7 +505,7 @@ public void shouldRenderSetIntersection() { } 
@Test // DATAMONGO-1536 - public void shouldRenderSetIntersectionAggregationExpresssion() { + void shouldRenderSetIntersectionAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").intersects("B")).as("commonToBoth") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -514,7 +515,7 @@ public void shouldRenderSetIntersectionAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetUnion() { + void shouldRenderSetUnion() { Document agg = project("A", "B").and("A").unionArrays("B").as("allValues").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -523,7 +524,7 @@ public void shouldRenderSetUnion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetUnionAggregationExpresssion() { + void shouldRenderSetUnionAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").union("B")).as("allValues") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -533,7 +534,7 @@ public void shouldRenderSetUnionAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetDifference() { + void shouldRenderSetDifference() { Document agg = project("A", "B").and("B").differenceToArray("A").as("inBOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -543,7 +544,7 @@ public void shouldRenderSetDifference() { } @Test // DATAMONGO-1536 - public void shouldRenderSetDifferenceAggregationExpresssion() { + void shouldRenderSetDifferenceAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("B").differenceTo("A")).as("inBOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -553,7 +554,7 @@ public void shouldRenderSetDifferenceAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSetIsSubset() { + void shouldRenderSetIsSubset() { Document agg = project("A", "B").and("A").subsetOfArray("B").as("aIsSubsetOfB") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -563,7 +564,7 @@ public void shouldRenderSetIsSubset() { } @Test // DATAMONGO-1536 - public void 
shouldRenderSetIsSubsetAggregationExpresssion() { + void shouldRenderSetIsSubsetAggregationExpresssion() { Document agg = project("A", "B").and(SetOperators.arrayAsSet("A").isSubsetOf("B")).as("aIsSubsetOfB") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -573,7 +574,7 @@ public void shouldRenderSetIsSubsetAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAnyElementTrue() { + void shouldRenderAnyElementTrue() { Document agg = project("responses").and("responses").anyElementInArrayTrue().as("isAnyTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -583,7 +584,7 @@ public void shouldRenderAnyElementTrue() { } @Test // DATAMONGO-1536 - public void shouldRenderAnyElementTrueAggregationExpresssion() { + void shouldRenderAnyElementTrueAggregationExpresssion() { Document agg = project("responses").and(SetOperators.arrayAsSet("responses").anyElementTrue()).as("isAnyTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -593,7 +594,7 @@ public void shouldRenderAnyElementTrueAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAllElementsTrue() { + void shouldRenderAllElementsTrue() { Document agg = project("responses").and("responses").allElementsInArrayTrue().as("isAllTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -603,7 +604,7 @@ public void shouldRenderAllElementsTrue() { } @Test // DATAMONGO-1536 - public void shouldRenderAllElementsTrueAggregationExpresssion() { + void shouldRenderAllElementsTrueAggregationExpresssion() { Document agg = project("responses").and(SetOperators.arrayAsSet("responses").allElementsTrue()).as("isAllTrue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -613,7 +614,7 @@ public void shouldRenderAllElementsTrueAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAbs() { + void shouldRenderAbs() { Document agg = project().and("anyNumber").absoluteValue().as("absoluteValue") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -622,7 +623,7 @@ public void shouldRenderAbs() { } 
@Test // DATAMONGO-1536 - public void shouldRenderAbsAggregationExpresssion() { + void shouldRenderAbsAggregationExpresssion() { Document agg = project() .and( @@ -634,7 +635,7 @@ public void shouldRenderAbsAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderAddAggregationExpresssion() { + void shouldRenderAddAggregationExpresssion() { Document agg = project().and(ArithmeticOperators.valueOf("price").add("fee")).as("total") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -643,7 +644,7 @@ public void shouldRenderAddAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderCeil() { + void shouldRenderCeil() { Document agg = project().and("anyNumber").ceil().as("ceilValue").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -651,7 +652,7 @@ public void shouldRenderCeil() { } @Test // DATAMONGO-1536 - public void shouldRenderCeilAggregationExpresssion() { + void shouldRenderCeilAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).ceil()) @@ -662,7 +663,7 @@ public void shouldRenderCeilAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderDivide() { + void shouldRenderDivide() { Document agg = project().and("value") .divide(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).as("result") @@ -673,7 +674,7 @@ public void shouldRenderDivide() { } @Test // DATAMONGO-1536 - public void shouldRenderDivideAggregationExpresssion() { + void shouldRenderDivideAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf("anyNumber") @@ -685,7 +686,7 @@ public void shouldRenderDivideAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderExp() { + void shouldRenderExp() { Document agg = project().and("value").exp().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -693,7 +694,7 @@ public void shouldRenderExp() { } @Test // DATAMONGO-1536 - 
public void shouldRenderExpAggregationExpresssion() { + void shouldRenderExpAggregationExpresssion() { Document agg = project() .and( @@ -705,7 +706,7 @@ public void shouldRenderExpAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderFloor() { + void shouldRenderFloor() { Document agg = project().and("value").floor().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -713,7 +714,7 @@ public void shouldRenderFloor() { } @Test // DATAMONGO-1536 - public void shouldRenderFloorAggregationExpresssion() { + void shouldRenderFloorAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).floor()) @@ -724,7 +725,7 @@ public void shouldRenderFloorAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderLn() { + void shouldRenderLn() { Document agg = project().and("value").ln().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -732,7 +733,7 @@ public void shouldRenderLn() { } @Test // DATAMONGO-1536 - public void shouldRenderLnAggregationExpresssion() { + void shouldRenderLnAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).ln()) @@ -743,7 +744,7 @@ public void shouldRenderLnAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderLog() { + void shouldRenderLog() { Document agg = project().and("value").log(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -751,7 +752,7 @@ public void shouldRenderLog() { } @Test // DATAMONGO-1536 - public void shouldRenderLogAggregationExpresssion() { + void shouldRenderLogAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).log(2)) @@ -762,7 +763,7 @@ public void shouldRenderLogAggregationExpresssion() { } @Test // DATAMONGO-1536 - public 
void shouldRenderLog10() { + void shouldRenderLog10() { Document agg = project().and("value").log10().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -770,7 +771,7 @@ public void shouldRenderLog10() { } @Test // DATAMONGO-1536 - public void shouldRenderLog10AggregationExpresssion() { + void shouldRenderLog10AggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).log10()) @@ -781,7 +782,7 @@ public void shouldRenderLog10AggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderMod() { + void shouldRenderMod() { Document agg = project().and("value").mod(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -791,7 +792,7 @@ public void shouldRenderMod() { } @Test // DATAMONGO-1536 - public void shouldRenderModAggregationExpresssion() { + void shouldRenderModAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).mod(2)) @@ -802,7 +803,7 @@ public void shouldRenderModAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderMultiply() { + void shouldRenderMultiply() { Document agg = project().and("value") .multiply(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).as("result") @@ -813,7 +814,7 @@ public void shouldRenderMultiply() { } @Test // DATAMONGO-1536 - public void shouldRenderMultiplyAggregationExpresssion() { + void shouldRenderMultiplyAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))) @@ -825,7 +826,7 @@ public void shouldRenderMultiplyAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderPow() { + void shouldRenderPow() { Document agg = 
project().and("value").pow(2).as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -833,7 +834,7 @@ public void shouldRenderPow() { } @Test // DATAMONGO-1536 - public void shouldRenderPowAggregationExpresssion() { + void shouldRenderPowAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).pow(2)) @@ -844,7 +845,7 @@ public void shouldRenderPowAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSqrt() { + void shouldRenderSqrt() { Document agg = project().and("value").sqrt().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -852,7 +853,7 @@ public void shouldRenderSqrt() { } @Test // DATAMONGO-1536 - public void shouldRenderSqrtAggregationExpresssion() { + void shouldRenderSqrtAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).sqrt()) @@ -863,7 +864,7 @@ public void shouldRenderSqrtAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderSubtract() { + void shouldRenderSubtract() { Document agg = project().and("numericField").minus(AggregationFunctionExpressions.SIZE.of(field("someArray"))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -873,7 +874,7 @@ public void shouldRenderSubtract() { } @Test // DATAMONGO-1536 - public void shouldRenderSubtractAggregationExpresssion() { + void shouldRenderSubtractAggregationExpresssion() { Document agg = project() .and(ArithmeticOperators.valueOf("numericField") @@ -885,7 +886,7 @@ public void shouldRenderSubtractAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderTrunc() { + void shouldRenderTrunc() { Document agg = project().and("value").trunc().as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -893,7 +894,7 @@ public void shouldRenderTrunc() { } @Test // DATAMONGO-1536 - public void 
shouldRenderTruncAggregationExpresssion() { + void shouldRenderTruncAggregationExpresssion() { Document agg = project().and( ArithmeticOperators.valueOf(AggregationFunctionExpressions.SUBTRACT.of(field("start"), field("end"))).trunc()) @@ -904,7 +905,7 @@ public void shouldRenderTruncAggregationExpresssion() { } @Test // DATAMONGO-1536 - public void shouldRenderConcat() { + void shouldRenderConcat() { Document agg = project().and("item").concat(" - ", field("description")).as("itemDescription") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -915,7 +916,7 @@ public void shouldRenderConcat() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatAggregationExpression() { + void shouldRenderConcatAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").concat(" - ").concatValueOf("description")) .as("itemDescription").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -926,7 +927,7 @@ public void shouldRenderConcatAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSubstr() { + void shouldRenderSubstr() { Document agg = project().and("quarter").substring(0, 2).as("yearSubstring").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -934,7 +935,7 @@ public void shouldRenderSubstr() { } @Test // DATAMONGO-1536 - public void shouldRenderSubstrAggregationExpression() { + void shouldRenderSubstrAggregationExpression() { Document agg = project().and(StringOperators.valueOf("quarter").substring(0, 2)).as("yearSubstring") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -943,7 +944,7 @@ public void shouldRenderSubstrAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderToLower() { + void shouldRenderToLower() { Document agg = project().and("item").toLower().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -951,7 +952,7 @@ public void shouldRenderToLower() { } @Test // DATAMONGO-1536 - public void shouldRenderToLowerAggregationExpression() { + void shouldRenderToLowerAggregationExpression() { Document agg = 
project().and(StringOperators.valueOf("item").toLower()).as("item") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -960,7 +961,7 @@ public void shouldRenderToLowerAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderToUpper() { + void shouldRenderToUpper() { Document agg = project().and("item").toUpper().as("item").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -968,7 +969,7 @@ public void shouldRenderToUpper() { } @Test // DATAMONGO-1536 - public void shouldRenderToUpperAggregationExpression() { + void shouldRenderToUpperAggregationExpression() { Document agg = project().and(StringOperators.valueOf("item").toUpper()).as("item") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -977,7 +978,7 @@ public void shouldRenderToUpperAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStrCaseCmp() { + void shouldRenderStrCaseCmp() { Document agg = project().and("quarter").strCaseCmp("13q4").as("comparisonResult") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -987,7 +988,7 @@ public void shouldRenderStrCaseCmp() { } @Test // DATAMONGO-1536 - public void shouldRenderStrCaseCmpAggregationExpression() { + void shouldRenderStrCaseCmpAggregationExpression() { Document agg = project().and(StringOperators.valueOf("quarter").strCaseCmp("13q4")).as("comparisonResult") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -997,7 +998,7 @@ public void shouldRenderStrCaseCmpAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderArrayElementAt() { + void shouldRenderArrayElementAt() { Document agg = project().and("favorites").arrayElementAt(0).as("first").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1005,7 +1006,7 @@ public void shouldRenderArrayElementAt() { } @Test // DATAMONGO-1536 - public void shouldRenderArrayElementAtAggregationExpression() { + void shouldRenderArrayElementAtAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").elementAt(0)).as("first") 
.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1014,7 +1015,7 @@ public void shouldRenderArrayElementAtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatArrays() { + void shouldRenderConcatArrays() { Document agg = project().and("instock").concatArrays("ordered").as("items").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1023,7 +1024,7 @@ public void shouldRenderConcatArrays() { } @Test // DATAMONGO-1536 - public void shouldRenderConcatArraysAggregationExpression() { + void shouldRenderConcatArraysAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").concat("ordered")).as("items") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1033,7 +1034,7 @@ public void shouldRenderConcatArraysAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderIsArray() { + void shouldRenderIsArray() { Document agg = project().and("instock").isArray().as("isAnArray").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1041,7 +1042,7 @@ public void shouldRenderIsArray() { } @Test // DATAMONGO-1536 - public void shouldRenderIsArrayAggregationExpression() { + void shouldRenderIsArrayAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").isArray()).as("isAnArray") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1050,7 +1051,7 @@ public void shouldRenderIsArrayAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSizeAggregationExpression() { + void shouldRenderSizeAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("instock").length()).as("arraySize") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1059,7 +1060,7 @@ public void shouldRenderSizeAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSliceAggregationExpression() { + void shouldRenderSliceAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().itemCount(3)).as("threeFavorites") 
.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1068,7 +1069,7 @@ public void shouldRenderSliceAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSliceWithPositionAggregationExpression() { + void shouldRenderSliceWithPositionAggregationExpression() { Document agg = project().and(ArrayOperators.arrayOf("favorites").slice().offset(2).itemCount(3)) .as("threeFavorites").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1077,7 +1078,7 @@ public void shouldRenderSliceWithPositionAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLiteral() { + void shouldRenderLiteral() { Document agg = project().and("$1").asLiteral().as("literalOnly").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1085,7 +1086,7 @@ public void shouldRenderLiteral() { } @Test // DATAMONGO-1536 - public void shouldRenderLiteralAggregationExpression() { + void shouldRenderLiteralAggregationExpression() { Document agg = project().and(LiteralOperators.valueOf("$1").asLiteral()).as("literalOnly") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1094,7 +1095,7 @@ public void shouldRenderLiteralAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderDayOfYearAggregationExpression() { + void shouldRenderDayOfYearAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfYear()).as("dayOfYear") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1103,7 +1104,7 @@ public void shouldRenderDayOfYearAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderDayOfYearAggregationExpressionWithTimezone() { + void shouldRenderDayOfYearAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfYear()).as("dayOfYear") @@ -1114,7 +1115,7 @@ public void shouldRenderDayOfYearAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderTimeZoneFromField() { + void shouldRenderTimeZoneFromField() { 
Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.ofField("tz")).dayOfYear()) .as("dayOfYear").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1124,7 +1125,7 @@ public void shouldRenderTimeZoneFromField() { } @Test // DATAMONGO-1834 - public void shouldRenderTimeZoneFromExpression() { + void shouldRenderTimeZoneFromExpression() { Document agg = project() .and(DateOperators.dateOf("date") @@ -1136,7 +1137,7 @@ public void shouldRenderTimeZoneFromExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderDayOfMonthAggregationExpression() { + void shouldRenderDayOfMonthAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfMonth()).as("day") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1145,7 +1146,7 @@ public void shouldRenderDayOfMonthAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { + void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfMonth()).as("day") @@ -1156,7 +1157,7 @@ public void shouldRenderDayOfMonthAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderDayOfWeekAggregationExpression() { + void shouldRenderDayOfWeekAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").dayOfWeek()).as("dayOfWeek") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1165,7 +1166,7 @@ public void shouldRenderDayOfWeekAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { + void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).dayOfWeek()).as("dayOfWeek") @@ -1176,7 +1177,7 @@ public void shouldRenderDayOfWeekAggregationExpressionWithTimezone() { } @Test // 
DATAMONGO-1536 - public void shouldRenderYearAggregationExpression() { + void shouldRenderYearAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").year()).as("year") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1185,7 +1186,7 @@ public void shouldRenderYearAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderYearAggregationExpressionWithTimezone() { + void shouldRenderYearAggregationExpressionWithTimezone() { Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).year()) .as("year").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1195,7 +1196,7 @@ public void shouldRenderYearAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderMonthAggregationExpression() { + void shouldRenderMonthAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").month()).as("month") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1204,7 +1205,7 @@ public void shouldRenderMonthAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderMonthAggregationExpressionWithTimezone() { + void shouldRenderMonthAggregationExpressionWithTimezone() { Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).month()) .as("month").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1214,7 +1215,7 @@ public void shouldRenderMonthAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderWeekAggregationExpression() { + void shouldRenderWeekAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").week()).as("week") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1223,7 +1224,7 @@ public void shouldRenderWeekAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderWeekAggregationExpressionWithTimezone() { + void shouldRenderWeekAggregationExpressionWithTimezone() { Document agg = 
project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).week()) .as("week").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1233,7 +1234,7 @@ public void shouldRenderWeekAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderHourAggregationExpression() { + void shouldRenderHourAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").hour()).as("hour") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1242,7 +1243,7 @@ public void shouldRenderHourAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderHourAggregationExpressionWithTimezone() { + void shouldRenderHourAggregationExpressionWithTimezone() { Document agg = project().and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).hour()) .as("hour").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1252,7 +1253,7 @@ public void shouldRenderHourAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderMinuteAggregationExpression() { + void shouldRenderMinuteAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").minute()).as("minute") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1261,7 +1262,7 @@ public void shouldRenderMinuteAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderMinuteAggregationExpressionWithTimezone() { + void shouldRenderMinuteAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).minute()).as("minute") @@ -1272,7 +1273,7 @@ public void shouldRenderMinuteAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderSecondAggregationExpression() { + void shouldRenderSecondAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").second()).as("second") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1281,7 +1282,7 @@ public void 
shouldRenderSecondAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderSecondAggregationExpressionWithTimezone() { + void shouldRenderSecondAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).second()).as("second") @@ -1292,7 +1293,7 @@ public void shouldRenderSecondAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderMillisecondAggregationExpression() { + void shouldRenderMillisecondAggregationExpression() { Document agg = project().and(DateOperators.dateOf("date").millisecond()).as("msec") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1301,7 +1302,7 @@ public void shouldRenderMillisecondAggregationExpression() { } @Test // DATAMONGO-1834 - public void shouldRenderMillisecondAggregationExpressionWithTimezone() { + void shouldRenderMillisecondAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).millisecond()).as("msec") @@ -1312,7 +1313,7 @@ public void shouldRenderMillisecondAggregationExpressionWithTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderDateToString() { + void shouldRenderDateToString() { Document agg = project().and("date").dateAsFormattedString("%H:%M:%S:%L").as("time") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1322,7 +1323,7 @@ public void shouldRenderDateToString() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithoutFormatOption() { + void shouldRenderDateToStringWithoutFormatOption() { Document agg = project().and("date").dateAsFormattedString().as("time").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1330,7 +1331,7 @@ public void shouldRenderDateToStringWithoutFormatOption() { } @Test // DATAMONGO-1536 - public void shouldRenderDateToStringAggregationExpression() { + void shouldRenderDateToStringAggregationExpression() { Document agg = 
project().and(DateOperators.dateOf("date").toString("%H:%M:%S:%L")).as("time") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1340,7 +1341,7 @@ public void shouldRenderDateToStringAggregationExpression() { } @Test // DATAMONGO-1834, DATAMONGO-2047 - public void shouldRenderDateToStringAggregationExpressionWithTimezone() { + void shouldRenderDateToStringAggregationExpressionWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toString("%H:%M:%S:%L")) @@ -1358,7 +1359,7 @@ public void shouldRenderDateToStringAggregationExpressionWithTimezone() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithOnNull() { + void shouldRenderDateToStringWithOnNull() { Document agg = project() .and(DateOperators.dateOf("date").toStringWithDefaultFormat().onNullReturnValueOf("fallback-field")).as("time") @@ -1369,7 +1370,7 @@ public void shouldRenderDateToStringWithOnNull() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithOnNullExpression() { + void shouldRenderDateToStringWithOnNullExpression() { Document agg = project() .and(DateOperators.dateOf("date").toStringWithDefaultFormat() @@ -1381,7 +1382,7 @@ public void shouldRenderDateToStringWithOnNullExpression() { } @Test // DATAMONGO-2047 - public void shouldRenderDateToStringWithOnNullAndTimezone() { + void shouldRenderDateToStringWithOnNullAndTimezone() { Document agg = project().and(DateOperators.dateOf("date").toStringWithDefaultFormat() .onNullReturnValueOf("fallback-field").withTimezone(Timezone.ofField("foo"))).as("time") @@ -1392,7 +1393,7 @@ public void shouldRenderDateToStringWithOnNullAndTimezone() { } @Test // DATAMONGO-1536 - public void shouldRenderSumAggregationExpression() { + void shouldRenderSumAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").sum()).as("quizTotal") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1401,7 +1402,7 @@ public void 
shouldRenderSumAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderSumWithMultipleArgsAggregationExpression() { + void shouldRenderSumWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").sum().and("midterm")).as("examTotal") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1410,7 +1411,7 @@ public void shouldRenderSumWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderAvgAggregationExpression() { + void shouldRenderAvgAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").avg()).as("quizAvg") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1419,7 +1420,7 @@ public void shouldRenderAvgAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderAvgWithMultipleArgsAggregationExpression() { + void shouldRenderAvgWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").avg().and("midterm")).as("examAvg") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1428,7 +1429,7 @@ public void shouldRenderAvgWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMaxAggregationExpression() { + void shouldRenderMaxAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").max()).as("quizMax") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1437,7 +1438,7 @@ public void shouldRenderMaxAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMaxWithMultipleArgsAggregationExpression() { + void shouldRenderMaxWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").max().and("midterm")).as("examMax") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1446,7 +1447,7 @@ public void shouldRenderMaxWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMinAggregationExpression() { + void 
shouldRenderMinAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("quizzes").min()).as("quizMin") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1455,7 +1456,7 @@ public void shouldRenderMinAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderMinWithMultipleArgsAggregationExpression() { + void shouldRenderMinWithMultipleArgsAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("final").min().and("midterm")).as("examMin") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1464,7 +1465,7 @@ public void shouldRenderMinWithMultipleArgsAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStdDevPopAggregationExpression() { + void shouldRenderStdDevPopAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevPop()).as("stdDev") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1473,7 +1474,7 @@ public void shouldRenderStdDevPopAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderStdDevSampAggregationExpression() { + void shouldRenderStdDevSampAggregationExpression() { Document agg = project().and(ArithmeticOperators.valueOf("scores").stdDevSamp()).as("stdDev") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1482,7 +1483,7 @@ public void shouldRenderStdDevSampAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderCmpAggregationExpression() { + void shouldRenderCmpAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").compareToValue(250)).as("cmp250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1491,7 +1492,7 @@ public void shouldRenderCmpAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderEqAggregationExpression() { + void shouldRenderEqAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(250)).as("eq250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1500,7 +1501,7 @@ public 
void shouldRenderEqAggregationExpression() { } @Test // DATAMONGO-2513 - public void shouldRenderEqAggregationExpressionWithListComparison() { + void shouldRenderEqAggregationExpressionWithListComparison() { Document agg = project().and(ComparisonOperators.valueOf("qty").equalToValue(Arrays.asList(250))).as("eq250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1509,7 +1510,7 @@ public void shouldRenderEqAggregationExpressionWithListComparison() { } @Test // DATAMONGO-1536 - public void shouldRenderGtAggregationExpression() { + void shouldRenderGtAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanValue(250)).as("gt250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1518,7 +1519,7 @@ public void shouldRenderGtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderGteAggregationExpression() { + void shouldRenderGteAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").greaterThanEqualToValue(250)).as("gte250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1527,7 +1528,7 @@ public void shouldRenderGteAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLtAggregationExpression() { + void shouldRenderLtAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanValue(250)).as("lt250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1536,7 +1537,7 @@ public void shouldRenderLtAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLteAggregationExpression() { + void shouldRenderLteAggregationExpression() { Document agg = project().and(ComparisonOperators.valueOf("qty").lessThanEqualToValue(250)).as("lte250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1545,7 +1546,7 @@ public void shouldRenderLteAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderNeAggregationExpression() { + void shouldRenderNeAggregationExpression() { Document agg = 
project().and(ComparisonOperators.valueOf("qty").notEqualToValue(250)).as("ne250") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1554,7 +1555,7 @@ public void shouldRenderNeAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLogicAndAggregationExpression() { + void shouldRenderLogicAndAggregationExpression() { Document agg = project() .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(100)) @@ -1566,7 +1567,7 @@ public void shouldRenderLogicAndAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderLogicOrAggregationExpression() { + void shouldRenderLogicOrAggregationExpression() { Document agg = project() .and(BooleanOperators.valueOf(ComparisonOperators.valueOf("qty").greaterThanValue(250)) @@ -1578,7 +1579,7 @@ public void shouldRenderLogicOrAggregationExpression() { } @Test // DATAMONGO-1536 - public void shouldRenderNotAggregationExpression() { + void shouldRenderNotAggregationExpression() { Document agg = project().and(BooleanOperators.not(ComparisonOperators.valueOf("qty").greaterThanValue(250))) .as("result").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1587,7 +1588,7 @@ public void shouldRenderNotAggregationExpression() { } @Test // DATAMONGO-1540 - public void shouldRenderMapAggregationExpression() { + void shouldRenderMapAggregationExpression() { Document agg = Aggregation.project() .and(VariableOperators.mapItemsOf("quizzes").as("grade") @@ -1599,7 +1600,7 @@ public void shouldRenderMapAggregationExpression() { } @Test // DATAMONGO-1540 - public void shouldRenderMapAggregationExpressionOnExpression() { + void shouldRenderMapAggregationExpressionOnExpression() { Document agg = Aggregation.project() .and(VariableOperators.mapItemsOf(AggregationFunctionExpressions.SIZE.of("foo")).as("grade") @@ -1611,7 +1612,7 @@ public void shouldRenderMapAggregationExpressionOnExpression() { } @Test // DATAMONGO-861, DATAMONGO-1542 - public void 
shouldRenderIfNullConditionAggregationExpression() { + void shouldRenderIfNullConditionAggregationExpression() { Document agg = project().and( ConditionalOperators.ifNull(ArrayOperators.arrayOf("array").elementAt(1)).then("a more sophisticated value")) @@ -1622,7 +1623,7 @@ public void shouldRenderIfNullConditionAggregationExpression() { } @Test // DATAMONGO-1542 - public void shouldRenderIfNullValueAggregationExpression() { + void shouldRenderIfNullValueAggregationExpression() { Document agg = project() .and(ConditionalOperators.ifNull("field").then(ArrayOperators.arrayOf("array").elementAt(1))).as("result") @@ -1633,7 +1634,7 @@ public void shouldRenderIfNullValueAggregationExpression() { } @Test // DATAMONGO-861, DATAMONGO-1542 - public void fieldReplacementIfNullShouldRenderCorrectly() { + void fieldReplacementIfNullShouldRenderCorrectly() { Document agg = project().and(ConditionalOperators.ifNull("optional").thenValueOf("$never-null")).as("result") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1643,7 +1644,7 @@ public void fieldReplacementIfNullShouldRenderCorrectly() { } @Test // DATAMONGO-1538 - public void shouldRenderLetExpressionCorrectly() { + void shouldRenderLetExpressionCorrectly() { Document agg = Aggregation.project() .and(VariableOperators @@ -1665,7 +1666,7 @@ public void shouldRenderLetExpressionCorrectly() { } @Test // DATAMONGO-1538 - public void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() { + void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() { ExpressionVariable var1 = newVariable("total") .forExpression(AggregationFunctionExpressions.ADD.of(Fields.field("price"), Fields.field("tax"))); @@ -1688,7 +1689,7 @@ public void shouldRenderLetExpressionCorrectlyWhenUsingLetOnProjectionBuilder() } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfBytesCorrectly() { + void shouldRenderIndexOfBytesCorrectly() { Document agg = 
project().and(StringOperators.valueOf("item").indexOf("foo")).as("byteLocation") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1698,7 +1699,7 @@ public void shouldRenderIndexOfBytesCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfBytesWithRangeCorrectly() { + void shouldRenderIndexOfBytesWithRangeCorrectly() { Document agg = project() .and(StringOperators.valueOf("item").indexOf("foo") @@ -1710,7 +1711,7 @@ public void shouldRenderIndexOfBytesWithRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfCPCorrectly() { + void shouldRenderIndexOfCPCorrectly() { Document agg = project().and(StringOperators.valueOf("item").indexOfCP("foo")).as("cpLocation") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1719,7 +1720,7 @@ public void shouldRenderIndexOfCPCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfCPWithRangeCorrectly() { + void shouldRenderIndexOfCPWithRangeCorrectly() { Document agg = project() .and(StringOperators.valueOf("item").indexOfCP("foo") @@ -1731,7 +1732,7 @@ public void shouldRenderIndexOfCPWithRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderSplitCorrectly() { + void shouldRenderSplitCorrectly() { Document agg = project().and(StringOperators.valueOf("city").split(", ")).as("city_state") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1740,7 +1741,7 @@ public void shouldRenderSplitCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderStrLenBytesCorrectly() { + void shouldRenderStrLenBytesCorrectly() { Document agg = project().and(StringOperators.valueOf("name").length()).as("length") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1749,7 +1750,7 @@ public void shouldRenderStrLenBytesCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderStrLenCPCorrectly() { + void shouldRenderStrLenCPCorrectly() { Document agg = project().and(StringOperators.valueOf("name").lengthCP()).as("length") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1758,7 +1759,7 @@ 
public void shouldRenderStrLenCPCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderSubstrCPCorrectly() { + void shouldRenderSubstrCPCorrectly() { Document agg = project().and(StringOperators.valueOf("quarter").substringCP(0, 2)).as("yearSubstring") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1766,27 +1767,27 @@ public void shouldRenderSubstrCPCorrectly() { assertThat(agg) .isEqualTo(Document.parse("{ $project : { yearSubstring: { $substrCP: [ \"$quarter\", 0, 2 ] } } }")); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindCorrectly() { + + @Test // GH-3725 + void shouldRenderRegexFindCorrectly() { Document agg = project().and(StringOperators.valueOf("field1").regexFind("e")).as("regex") .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFind: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllCorrectly() { + + @Test // GH-3725 + void shouldRenderRegexFindAllCorrectly() { Document agg = project().and(StringOperators.valueOf("field1").regexFindAll("e")).as("regex") .toDocument(Aggregation.DEFAULT_CONTEXT); assertThat(agg).isEqualTo(Document.parse("{ $project : { regex: { $regexFindAll: { \"input\" : \"$field1\", \"regex\" : \"e\" } } } }")); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchCorrectly() { + + @Test // GH-3725 + void shouldRenderRegexMatchCorrectly() { Document agg = project().and(StringOperators.valueOf("field1").regexMatch("e")).as("regex") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1795,7 +1796,7 @@ public void shouldRenderRegexMatchCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIndexOfArrayCorrectly() { + void shouldRenderIndexOfArrayCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("items").indexOf(2)).as("index") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1804,7 +1805,7 @@ public void 
shouldRenderIndexOfArrayCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderRangeCorrectly() { + void shouldRenderRangeCorrectly() { Document agg = project().and(ArrayOperators.RangeOperator.rangeStartingAt(0L).to("distance").withStepSize(25L)) .as("rest_stops").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1815,7 +1816,7 @@ public void shouldRenderRangeCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReverseArrayCorrectly() { + void shouldRenderReverseArrayCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("favorites").reverse()).as("reverseFavorites") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1824,7 +1825,7 @@ public void shouldRenderReverseArrayCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReduceWithSimpleObjectCorrectly() { + void shouldRenderReduceWithSimpleObjectCorrectly() { Document agg = project() .and(ArrayOperators.arrayOf("probabilityArr") @@ -1836,7 +1837,7 @@ public void shouldRenderReduceWithSimpleObjectCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderReduceWithComplexObjectCorrectly() { + void shouldRenderReduceWithComplexObjectCorrectly() { PropertyExpression sum = PropertyExpression.property("sum").definedAs( ArithmeticOperators.valueOf(Variable.VALUE.referringTo("sum").getName()).add(Variable.THIS.getName())); @@ -1853,7 +1854,7 @@ public void shouldRenderReduceWithComplexObjectCorrectly() { } @Test // DATAMONGO-1843 - public void shouldRenderReduceWithInputAndInExpressionsCorrectly() { + void shouldRenderReduceWithInputAndInExpressionsCorrectly() { Document expected = Document.parse( "{ \"$project\" : { \"results\" : { \"$reduce\" : { \"input\" : { \"$slice\" : [\"$array\", 5] }, \"initialValue\" : \"\", \"in\" : { \"$concat\" : [\"$$value\", \"/\", \"$$this\"] } } } } }"); @@ -1874,7 +1875,7 @@ public void shouldRenderReduceWithInputAndInExpressionsCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderZipCorrectly() { + void 
shouldRenderZipCorrectly() { AggregationExpression elemAt0 = ArrayOperators.arrayOf("matrix").elementAt(0); AggregationExpression elemAt1 = ArrayOperators.arrayOf("matrix").elementAt(1); @@ -1889,7 +1890,7 @@ public void shouldRenderZipCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderInCorrectly() { + void shouldRenderInCorrectly() { Document agg = project().and(ArrayOperators.arrayOf("in_stock").containsValue("bananas")).as("has_bananas") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1899,7 +1900,7 @@ public void shouldRenderInCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIsoDayOfWeekCorrectly() { + void shouldRenderIsoDayOfWeekCorrectly() { Document agg = project().and(DateOperators.dateOf("birthday").isoDayOfWeek()).as("dayOfWeek") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1908,7 +1909,7 @@ public void shouldRenderIsoDayOfWeekCorrectly() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { + void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { Document agg = project() .and(DateOperators.dateOf("birthday").withTimezone(Timezone.valueOf("America/Chicago")).isoDayOfWeek()) @@ -1919,7 +1920,7 @@ public void shouldRenderIsoDayOfWeekWithTimezoneCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIsoWeekCorrectly() { + void shouldRenderIsoWeekCorrectly() { Document agg = project().and(DateOperators.dateOf("date").isoWeek()).as("weekNumber") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1928,7 +1929,7 @@ public void shouldRenderIsoWeekCorrectly() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoWeekWithTimezoneCorrectly() { + void shouldRenderIsoWeekWithTimezoneCorrectly() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeek()).as("weekNumber") @@ -1939,7 +1940,7 @@ public void shouldRenderIsoWeekWithTimezoneCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderIsoWeekYearCorrectly() { + 
void shouldRenderIsoWeekYearCorrectly() { Document agg = project().and(DateOperators.dateOf("date").isoWeekYear()).as("yearNumber") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -1948,7 +1949,7 @@ public void shouldRenderIsoWeekYearCorrectly() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoWeekYearWithTimezoneCorrectly() { + void shouldRenderIsoWeekYearWithTimezoneCorrectly() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear()) @@ -1959,7 +1960,7 @@ public void shouldRenderIsoWeekYearWithTimezoneCorrectly() { } @Test // DATAMONGO-1548 - public void shouldRenderSwitchCorrectly() { + void shouldRenderSwitchCorrectly() { String expected = "$switch:\n" + // "{\n" + // @@ -2001,7 +2002,7 @@ public void shouldRenderSwitchCorrectly() { } @Test // DATAMONGO-1548 - public void shouldTypeCorrectly() { + void shouldTypeCorrectly() { Document agg = project().and(DataTypeOperators.Type.typeOf("a")).as("a").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2009,7 +2010,7 @@ public void shouldTypeCorrectly() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromPartsWithJustTheYear() { + void shouldRenderDateFromPartsWithJustTheYear() { Document agg = project().and(DateOperators.dateFromParts().year(2018)).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2018,7 +2019,7 @@ public void shouldRenderDateFromPartsWithJustTheYear() { } @Test // DATAMONGO-1834, DATAMONGO-2671 - public void shouldRenderDateFromParts() { + void shouldRenderDateFromParts() { Document agg = project() .and(DateOperators.dateFromParts().year(2018).month(3).day(23).hour(14).minute(25).second(10).millisecond(2)) @@ -2029,7 +2030,7 @@ public void shouldRenderDateFromParts() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromPartsWithTimezone() { + void shouldRenderDateFromPartsWithTimezone() { Document agg = project() 
.and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).year(2018)).as("newDate") @@ -2040,7 +2041,7 @@ public void shouldRenderDateFromPartsWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoDateFromPartsWithJustTheYear() { + void shouldRenderIsoDateFromPartsWithJustTheYear() { Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018)).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2049,7 +2050,7 @@ public void shouldRenderIsoDateFromPartsWithJustTheYear() { } @Test // DATAMONGO-1834, DATAMONGO-2671 - public void shouldRenderIsoDateFromParts() { + void shouldRenderIsoDateFromParts() { Document agg = project().and(DateOperators.dateFromParts().isoWeekYear(2018).isoWeek(12).isoDayOfWeek(5).hour(14) .minute(30).second(42).millisecond(2)).as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2059,7 +2060,7 @@ public void shouldRenderIsoDateFromParts() { } @Test // DATAMONGO-1834 - public void shouldRenderIsoDateFromPartsWithTimezone() { + void shouldRenderIsoDateFromPartsWithTimezone() { Document agg = project() .and(DateOperators.dateFromParts().withTimezone(Timezone.valueOf("America/Chicago")).isoWeekYear(2018)) @@ -2070,7 +2071,7 @@ public void shouldRenderIsoDateFromPartsWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderDateToParts() { + void shouldRenderDateToParts() { Document agg = project().and(DateOperators.dateOf("date").toParts()).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2079,7 +2080,7 @@ public void shouldRenderDateToParts() { } @Test // DATAMONGO-1834 - public void shouldRenderDateToIsoParts() { + void shouldRenderDateToIsoParts() { Document agg = project().and(DateOperators.dateOf("date").toParts().iso8601()).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2089,7 +2090,7 @@ public void shouldRenderDateToIsoParts() { } @Test // DATAMONGO-1834 - public void shouldRenderDateToPartsWithTimezone() { + void 
shouldRenderDateToPartsWithTimezone() { Document agg = project() .and(DateOperators.dateOf("date").withTimezone(Timezone.valueOf("America/Chicago")).toParts()).as("newDate") @@ -2100,7 +2101,7 @@ public void shouldRenderDateToPartsWithTimezone() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromString() { + void shouldRenderDateFromString() { Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787")).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2110,7 +2111,7 @@ public void shouldRenderDateFromString() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromStringWithFieldReference() { + void shouldRenderDateFromStringWithFieldReference() { Document agg = project().and(DateOperators.dateOf("date").fromString()).as("newDate") .toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2120,7 +2121,7 @@ public void shouldRenderDateFromStringWithFieldReference() { } @Test // DATAMONGO-1834 - public void shouldRenderDateFromStringWithTimezone() { + void shouldRenderDateFromStringWithTimezone() { Document agg = project() .and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withTimezone(Timezone.valueOf("America/Chicago"))) @@ -2131,7 +2132,7 @@ public void shouldRenderDateFromStringWithTimezone() { } @Test // DATAMONGO-2047 - public void shouldRenderDateFromStringWithFormat() { + void shouldRenderDateFromStringWithFormat() { Document agg = project().and(DateOperators.dateFromString("2017-02-08T12:10:40.787").withFormat("dd/mm/yyyy")) .as("newDate").toDocument(Aggregation.DEFAULT_CONTEXT); @@ -2141,7 +2142,7 @@ public void shouldRenderDateFromStringWithFormat() { } @Test // DATAMONGO-2200 - public void typeProjectionShouldIncludeTopLevelFieldsOfType() { + void typeProjectionShouldIncludeTopLevelFieldsOfType() { ProjectionOperation operation = Aggregation.project(Book.class); @@ -2155,7 +2156,7 @@ public void typeProjectionShouldIncludeTopLevelFieldsOfType() { } @Test // DATAMONGO-2200 - public void 
typeProjectionShouldMapFieldNames() { + void typeProjectionShouldMapFieldNames() { MongoMappingContext mappingContext = new MongoMappingContext(); MongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); @@ -2171,7 +2172,7 @@ public void typeProjectionShouldMapFieldNames() { } @Test // DATAMONGO-2200 - public void typeProjectionShouldIncludeInterfaceProjectionValues() { + void typeProjectionShouldIncludeInterfaceProjectionValues() { ProjectionOperation operation = Aggregation.project(ProjectionInterface.class); @@ -2184,7 +2185,7 @@ public void typeProjectionShouldIncludeInterfaceProjectionValues() { } @Test // DATAMONGO-2200 - public void typeProjectionShouldBeEmptyIfNoPropertiesFound() { + void typeProjectionShouldBeEmptyIfNoPropertiesFound() { ProjectionOperation operation = Aggregation.project(EmptyType.class); @@ -2195,7 +2196,7 @@ public void typeProjectionShouldBeEmptyIfNoPropertiesFound() { } @Test // DATAMONGO-2312 - public void simpleFieldReferenceAsArray() { + void simpleFieldReferenceAsArray() { org.bson.Document doc = Aggregation.newAggregation(project("x", "y", "someField").asArray("myArray")) .toDocument("coll", Aggregation.DEFAULT_CONTEXT); @@ -2205,7 +2206,7 @@ public void simpleFieldReferenceAsArray() { } @Test // DATAMONGO-2312 - public void mappedFieldReferenceAsArray() { + void mappedFieldReferenceAsArray() { MongoMappingContext mappingContext = new MongoMappingContext(); @@ -2219,7 +2220,7 @@ public void mappedFieldReferenceAsArray() { } @Test // DATAMONGO-2312 - public void arrayWithNullValue() { + void arrayWithNullValue() { Document doc = project() // .andArrayOf(Fields.field("field-1"), null, "value").as("myArray") // @@ -2229,7 +2230,7 @@ public void arrayWithNullValue() { } @Test // DATAMONGO-2312 - public void nestedArrayField() { + void nestedArrayField() { Document doc = project("_id", "value") // .andArrayOf(Fields.field("field-1"), "plain - string", 
ArithmeticOperators.valueOf("field-1").sum().and(10)) @@ -2241,7 +2242,7 @@ public void nestedArrayField() { } @Test // DATAMONGO-2312 - public void nestedMappedFieldReferenceInArrayField() { + void nestedMappedFieldReferenceInArrayField() { MongoMappingContext mappingContext = new MongoMappingContext(); @@ -2289,7 +2290,7 @@ interface ProjectionInterface { String getTitle(); } - static class EmptyType { + private static class EmptyType { } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 41b0323636..e92ea38336 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -32,6 +32,7 @@ * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl + * @author Divya Srivastava */ public class SpelExpressionTransformerUnitTests { @@ -800,68 +801,68 @@ void shouldRenderRtrimWithCharsFromFieldReference() { assertThat(transform("rtrim(field1, field2)")) .isEqualTo("{ \"$rtrim\" : {\"input\" : \"$field1\", \"chars\" : \"$field2\" }}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindWithoutOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindWithoutOptions() { + assertThat(transform("regexFind(field1,'e')")) - .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindWithOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + assertThat(transform("regexFind(field1,'e','i')")) - 
.isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindWithOptionsFromFieldReference() { - + + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsFromFieldReference() { + assertThat(transform("regexFind(field1,'e',field2)")) - .isEqualTo(Document.parse("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + .isEqualTo("{ \"$regexFind\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindAllWithoutOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindAllWithoutOptions() { + assertThat(transform("regexFindAll(field1,'e')")) - .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindAllWithOptions() { - + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { + assertThat(transform("regexFindAll(field1,'e','i')")) - .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexFindAllWithOptionsFromFieldReference() { - + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsFromFieldReference() { + assertThat(transform("regexFindAll(field1,'e',field2)")) - .isEqualTo(Document.parse("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + .isEqualTo("{ \"$regexFindAll\" : {\"input\" : \"$field1\" , \"regex\" : 
\"e\" , \"options\" : \"$field2\"}}"); } - @Test // DATAMONGO-3725 - public void shouldRenderRegexMatchWithoutOptions() { - + @Test // GH-3725 + void shouldRenderRegexMatchWithoutOptions() { + assertThat(transform("regexMatch(field1,'e')")) - .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}")); + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexMatchWithOptions() { - + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { + assertThat(transform("regexMatch(field1,'e','i')")) - .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}")); + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"i\"}}"); } - - @Test // DATAMONGO-3725 - public void shouldRenderRegexMatchWithOptionsFromFieldReference() { - + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsFromFieldReference() { + assertThat(transform("regexMatch(field1,'e',field2)")) - .isEqualTo(Document.parse("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}")); + .isEqualTo("{ \"$regexMatch\" : {\"input\" : \"$field1\" , \"regex\" : \"e\" , \"options\" : \"$field2\"}}"); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java index cdd0b38dbc..d8ba5129e0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/StringOperatorsUnitTests.java @@ -15,7 +15,9 @@ */ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; +import 
static org.springframework.data.mongodb.test.util.Assertions.*; + +import java.util.regex.Pattern; import org.bson.Document; import org.junit.jupiter.api.Test; @@ -25,230 +27,258 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Divya Srivastava * @currentRead Royal Assassin - Robin Hobb */ -public class StringOperatorsUnitTests { +class StringOperatorsUnitTests { - static final String EXPRESSION_STRING = "{ \"$fitz\" : \"chivalry\" }"; - static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); - static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; + private static final String EXPRESSION_STRING = "{ \"$fitz\" : \"chivalry\" }"; + private static final Document EXPRESSION_DOC = Document.parse(EXPRESSION_STRING); + private static final AggregationExpression EXPRESSION = context -> EXPRESSION_DOC; @Test // DATAMONGO-2049 - public void shouldRenderTrim() { + void shouldRenderTrim() { assertThat(StringOperators.valueOf("shrewd").trim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimForExpression() { + void shouldRenderTrimForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).trim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $trim: { \"input\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimWithChars() { + void shouldRenderTrimWithChars() { assertThat(StringOperators.valueOf("shrewd").trim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimWithCharsExpression() { + void 
shouldRenderTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").trim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $trim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimLeft() { + void shouldRenderTrimLeft() { assertThat(StringOperators.valueOf("shrewd").trim().left().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimLeftWithChars() { + void shouldRenderTrimLeftWithChars() { assertThat(StringOperators.valueOf("shrewd").trim("sh").left().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimRight() { + void shouldRenderTrimRight() { assertThat(StringOperators.valueOf("shrewd").trim().right().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderTrimRightWithChars() { + void shouldRenderTrimRightWithChars() { assertThat(StringOperators.valueOf("shrewd").trim("sh").right().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrim() { + void shouldRenderLTrim() { assertThat(StringOperators.valueOf("shrewd").ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - 
.isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrimForExpression() { + void shouldRenderLTrimForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).ltrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $ltrim: { \"input\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrimWithChars() { + void shouldRenderLTrimWithChars() { assertThat(StringOperators.valueOf("shrewd").ltrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderLTrimWithCharsExpression() { + void shouldRenderLTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").ltrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $ltrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrim() { + void shouldRenderRTrim() { assertThat(StringOperators.valueOf("shrewd").rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrimForExpression() { + void shouldRenderRTrimForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).rtrim().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $rtrim: { \"input\" : " + 
EXPRESSION_STRING + " } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrimWithChars() { + void shouldRenderRTrimWithChars() { assertThat(StringOperators.valueOf("shrewd").rtrim("sh").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : \"sh\" } } "); } @Test // DATAMONGO-2049 - public void shouldRenderRTrimWithCharsExpression() { + void shouldRenderRTrimWithCharsExpression() { assertThat(StringOperators.valueOf("shrewd").rtrim(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $rtrim: { \"input\" : \"$shrewd\", \"chars\" : " + EXPRESSION_STRING + " } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAll() { + + @Test // GH-3725 + void shouldRenderRegexFindAll() { assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllForExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindAllForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).regexFindAll("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllForRegexExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindAllForRegexExpression() { 
assertThat(StringOperators.valueOf("shrewd").regexFindAll(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexFindAllWithPattern() { + + assertThat(StringOperators.valueOf("shrewd") + .regexFindAll( + Pattern.compile("foo", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE | Pattern.DOTALL | Pattern.COMMENTS)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"foo\" , \"options\" : \"imsx\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllWithOptions() { + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptions() { assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindAllWithOptionsExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindAllWithOptionsExpression() { assertThat(StringOperators.valueOf("shrewd").regexFindAll("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexFindAll: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatch() { + + @Test // GH-3725 + void shouldRenderRegexMatch() { 
assertThat(StringOperators.valueOf("shrewd").regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchForExpression() { + + @Test // GH-3725 + void shouldRenderRegexMatchForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).regexMatch("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + .isEqualTo("{ $regexMatch: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchForRegexExpression() { + + @Test // GH-3725 + void shouldRenderRegexMatchForRegexExpression() { assertThat(StringOperators.valueOf("shrewd").regexMatch(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); + } + + @Test // GH-3725 + void shouldRenderRegexMatchForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexMatch(Pattern.compile("foo", Pattern.CASE_INSENSITIVE)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"i\"} } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchWithOptions() { + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptions() { assertThat(StringOperators.valueOf("shrewd").regexMatch("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + .isEqualTo("{ $regexMatch: { \"input\" 
: \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexMatchWithOptionsExpression() { + + @Test // GH-3725 + void shouldRenderRegexMatchWithOptionsExpression() { assertThat(StringOperators.valueOf("shrewd").regexMatch("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexMatch: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); } - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFind() { + @Test // GH-3725 + void shouldRenderRegexFind() { assertThat(StringOperators.valueOf("shrewd").regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }")); + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"e\" } }"); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindForExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindForExpression() { assertThat(StringOperators.valueOf(EXPRESSION).regexFind("e").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } ")); + .isEqualTo("{ $regexFind: { \"input\" : " + EXPRESSION_STRING + " , \"regex\" : \"e\" } } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindForRegexExpression() { + + @Test // GH-3725 + void shouldRenderRegexFindForRegexExpression() { assertThat(StringOperators.valueOf("shrewd").regexFind(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } ")); + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : " + EXPRESSION_STRING + " } } "); } - - @Test // 
DATAMONGO - 3725 - public void shouldRenderRegexFindWithOptions() { - assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } ")); + @Test // GH-3725 + void shouldRenderRegexFindForPattern() { + + assertThat(StringOperators.valueOf("shrewd").regexFind(Pattern.compile("foo", Pattern.MULTILINE)) + .toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\" , \"regex\" : \"foo\", \"options\" : \"m\"} } "); } - - @Test // DATAMONGO - 3725 - public void shouldRenderRegexFindWithOptionsExpression() { - assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo(Document.parse("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + " } } ")); + @Test // GH-3725 + void shouldRenderRegexFindWithOptions() { + + assertThat(StringOperators.valueOf("shrewd").regexFind("e").options("i").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : \"i\" } } "); } + @Test // GH-3725 + void shouldRenderRegexFindWithOptionsExpression() { + assertThat(StringOperators.valueOf("shrewd").regexFind("e").optionsOf(EXPRESSION).toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $regexFind: { \"input\" : \"$shrewd\", \"regex\" : \"e\" , \"options\" : " + EXPRESSION_STRING + + " } } "); + } } diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index f96719adde..75ed415096 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -88,7 +88,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `abs`, `add` (+++*+++ via 
`plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators -| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` +| `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` | Comparison Aggregation Operators | `eq` (+++*+++ via `is`), `gt`, `gte`, `lt`, `lte`, `ne` From 62eb719b1e9adb050c33be254cbe9bf7a527415e Mon Sep 17 00:00:00 2001 From: James McNee Date: Thu, 26 Aug 2021 21:25:36 +0100 Subject: [PATCH 111/983] Add support for `$sampleRate` criteria. Closes #3726 Original pull request: #3765. --- .../data/mongodb/core/query/Criteria.java | 16 ++++++++++++++++ .../mongodb/core/query/CriteriaUnitTests.java | 16 ++++++++++++++++ .../asciidoc/reference/mongo-repositories.adoc | 4 ++++ 3 files changed, 36 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index f9a354c38f..3ec4caf3c7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -64,6 +64,7 @@ * @author Andreas Zink * @author Ziemowit Stolarczyk * @author Clément Petit + * @author James McNee */ public class Criteria implements CriteriaDefinition { @@ -390,6 +391,21 @@ public Criteria exists(boolean value) { return this; } + /** + * Creates a criterion using the {@literal $sampleRate} operator. 
+ * + * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. + * @return this. + * @see MongoDB Query operator: $sampleRate + */ + public Criteria sampleRate(double sampleRate) { + Assert.isTrue(sampleRate >= 0, "The sample rate must be greater than zero!"); + Assert.isTrue(sampleRate <= 1, "The sample rate must not be greater than one!"); + + criteria.put("$sampleRate", sampleRate); + return this; + } + /** * Creates a criterion using the {@literal $type} operator. * diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java index 9edf3c43fd..e24fc34bef 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java @@ -156,6 +156,22 @@ public void shouldNegateFollowingSimpleExpression() { assertThat(co).isEqualTo(Document.parse("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}")); } + @Test // GH-3726 + public void shouldBuildCorrectSampleRateOperation() { + Criteria c = new Criteria().sampleRate(0.4); + assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"$sampleRate\" : 0.4 }")); + } + + @Test // GH-3726 + public void shouldThrowExceptionWhenSampleRateIsNegative() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(-1)); + } + + @Test // GH-3726 + public void shouldThrowExceptionWhenSampleRateIsGreatedThanOne() { + assertThatIllegalArgumentException().isThrownBy(() -> new Criteria().sampleRate(1.01)); + } + @Test // DATAMONGO-1068 public void getCriteriaObjectShouldReturnEmptyDocumentWhenNoCriteriaSpecified() { diff --git a/src/main/asciidoc/reference/mongo-repositories.adoc b/src/main/asciidoc/reference/mongo-repositories.adoc index 
328a547b5a..b847174c67 100644 --- a/src/main/asciidoc/reference/mongo-repositories.adoc +++ b/src/main/asciidoc/reference/mongo-repositories.adoc @@ -281,6 +281,10 @@ lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range` | `Exists` | `findByLocationExists(boolean exists)` | `{"location" : {"$exists" : exists }}` + +| `SampleRate` +| `sampleRate(double sampleRate)` +| `{"$sampleRate" : sampleRate }` |=== NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters. From f662d7ca0d240a9f719ffec78243fc7661c544a1 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 27 Aug 2021 09:34:40 +0200 Subject: [PATCH 112/983] Polishing. Tweak Javadoc. Add since tag, reformat code. Simplify tests. Move documentation bits into the right place. See #3726. Original pull request: #3765. --- .../data/mongodb/core/query/Criteria.java | 8 ++++-- .../mongodb/core/query/CriteriaUnitTests.java | 27 ++++++++++--------- .../reference/mongo-repositories.adoc | 4 --- src/main/asciidoc/reference/mongodb.adoc | 1 + 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java index 3ec4caf3c7..df167330a1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Criteria.java @@ -394,11 +394,15 @@ public Criteria exists(boolean value) { /** * Creates a criterion using the {@literal $sampleRate} operator. * - * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. + * @param sampleRate sample rate to determine number of documents to be randomly selected from the input. Must be + * between {@code 0} and {@code 1}. * @return this. 
- * @see MongoDB Query operator: $sampleRate + * @see MongoDB Query operator: + * $sampleRate + * @since 3.3 */ public Criteria sampleRate(double sampleRate) { + Assert.isTrue(sampleRate >= 0, "The sample rate must be greater than zero!"); Assert.isTrue(sampleRate <= 1, "The sample rate must not be greater than one!"); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java index e24fc34bef..96253e4ac0 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/CriteriaUnitTests.java @@ -40,19 +40,20 @@ * @author Ziemowit Stolarczyk * @author Clément Petit * @author Mark Paluch + * @author James McNee */ public class CriteriaUnitTests { @Test public void testSimpleCriteria() { Criteria c = new Criteria("name").is("Bubba"); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"name\" : \"Bubba\"}")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\"}"); } @Test public void testNotEqualCriteria() { Criteria c = new Criteria("name").ne("Bubba"); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"name\" : { \"$ne\" : \"Bubba\"}}")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : { \"$ne\" : \"Bubba\"}}"); } @Test @@ -67,7 +68,7 @@ public void buildsIsNullCriteriaCorrectly() { @Test public void testChainedCriteria() { Criteria c = new Criteria("name").is("Bubba").and("age").lt(21); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"name\" : \"Bubba\" , \"age\" : { \"$lt\" : 21}}"); } @Test(expected = InvalidMongoDbApiUsageException.class) @@ -153,13 +154,13 @@ public void 
shouldNegateFollowingSimpleExpression() { Document co = c.getCriteriaObject(); assertThat(co).isNotNull(); - assertThat(co).isEqualTo(Document.parse("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}")); + assertThat(co).isEqualTo("{ \"age\" : { \"$not\" : { \"$gt\" : 18}} , \"status\" : \"student\"}"); } @Test // GH-3726 public void shouldBuildCorrectSampleRateOperation() { Criteria c = new Criteria().sampleRate(0.4); - assertThat(c.getCriteriaObject()).isEqualTo(Document.parse("{ \"$sampleRate\" : 0.4 }")); + assertThat(c.getCriteriaObject()).isEqualTo("{ \"$sampleRate\" : 0.4 }"); } @Test // GH-3726 @@ -302,7 +303,7 @@ public void shouldAppendBitsAllClearWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().allClear(0b101); assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllClear\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllClear\" : 5} }"); } @Test // DATAMONGO-1808 @@ -311,7 +312,7 @@ public void shouldAppendBitsAllClearWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().allClear(Arrays.asList(0, 2)); assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllClear\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllClear\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-1808 @@ -320,7 +321,7 @@ public void shouldAppendBitsAllSetWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().allSet(0b101); assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllSet\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllSet\" : 5} }"); } @Test // DATAMONGO-1808 @@ -329,7 +330,7 @@ public void shouldAppendBitsAllSetWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().allSet(Arrays.asList(0, 2)); 
assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAllSet\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAllSet\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-1808 @@ -338,7 +339,7 @@ public void shouldAppendBitsAnyClearWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().anyClear(0b101); assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnyClear\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : 5} }"); } @Test // DATAMONGO-1808 @@ -347,7 +348,7 @@ public void shouldAppendBitsAnyClearWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anyClear(Arrays.asList(0, 2)); assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnyClear\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnyClear\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-1808 @@ -356,7 +357,7 @@ public void shouldAppendBitsAnySetWithIntBitmaskCorrectly() { Criteria numericBitmaskCriteria = new Criteria("field").bits().anySet(0b101); assertThat(numericBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnySet\" : 5} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnySet\" : 5} }"); } @Test // DATAMONGO-1808 @@ -365,7 +366,7 @@ public void shouldAppendBitsAnySetWithPositionListCorrectly() { Criteria bitPositionsBitmaskCriteria = new Criteria("field").bits().anySet(Arrays.asList(0, 2)); assertThat(bitPositionsBitmaskCriteria.getCriteriaObject()) - .isEqualTo(Document.parse("{ \"field\" : { \"$bitsAnySet\" : [ 0, 2 ]} }")); + .isEqualTo("{ \"field\" : { \"$bitsAnySet\" : [ 0, 2 ]} }"); } @Test // DATAMONGO-2002 diff --git a/src/main/asciidoc/reference/mongo-repositories.adoc b/src/main/asciidoc/reference/mongo-repositories.adoc index b847174c67..328a547b5a 100644 --- 
a/src/main/asciidoc/reference/mongo-repositories.adoc +++ b/src/main/asciidoc/reference/mongo-repositories.adoc @@ -281,10 +281,6 @@ lower / upper bounds (`$gt` / `$gte` & `$lt` / `$lte`) according to `Range` | `Exists` | `findByLocationExists(boolean exists)` | `{"location" : {"$exists" : exists }}` - -| `SampleRate` -| `sampleRate(double sampleRate)` -| `{"$sampleRate" : sampleRate }` |=== NOTE: If the property criterion compares a document, the order of the fields and exact equality in the document matters. diff --git a/src/main/asciidoc/reference/mongodb.adoc b/src/main/asciidoc/reference/mongodb.adoc index f214edba4c..7bf034f461 100644 --- a/src/main/asciidoc/reference/mongodb.adoc +++ b/src/main/asciidoc/reference/mongodb.adoc @@ -1219,6 +1219,7 @@ The `Criteria` class provides the following methods, all of which correspond to * `Criteria` *orOperator* `(Criteria... criteria)` Creates an or query using the `$or` operator for all of the provided criteria * `Criteria` *orOperator* `(Collection criteria)` Creates an or query using the `$or` operator for all of the provided criteria * `Criteria` *regex* `(String re)` Creates a criterion using a `$regex` +* `Criteria` *sampleRate* `(double sampleRate)` Creates a criterion using the `$sampleRate` operator * `Criteria` *size* `(int s)` Creates a criterion using the `$size` operator * `Criteria` *type* `(int t)` Creates a criterion using the `$type` operator * `Criteria` *matchingDocumentStructure* `(MongoJsonSchema schema)` Creates a criterion using the `$jsonSchema` operator for <>. `$jsonSchema` can only be applied on the top level of a query and not property specific. Use the `properties` attribute of the schema to match against nested fields. From bf86f39b2d5bc2d3c197fbff90551b85b440474a Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 24 Aug 2021 07:31:25 +0200 Subject: [PATCH 113/983] Fix id field target type conversion for document references. 
This commit fixes an issue where a defined custom target type conversion for the id field was not properly considered when writing a document reference. Previously an eg. String was not being converted into an ObjectId correctly causing lookup queries to return empty results. Converting the id property value on write solves the issue. Includes a minor polish in the mapping centralizing pointer creation within the DocumentPointerFactory. Closes: #3782 Original pull request: #3785. --- .../core/convert/DocumentPointerFactory.java | 11 ++- .../core/convert/MappingMongoConverter.java | 23 ++--- .../MongoTemplateDocumentReferenceTests.java | 98 ++++++++++++++++++- 3 files changed, 114 insertions(+), 18 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java index 09d69e4b27..b30aa957de 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentPointerFactory.java @@ -83,7 +83,16 @@ DocumentPointer computePointer( .getRequiredPersistentEntity(property.getAssociationTargetType()); if (usesDefaultLookup(property)) { - return () -> persistentEntity.getIdentifierAccessor(value).getIdentifier(); + + MongoPersistentProperty idProperty = persistentEntity.getIdProperty(); + Object idValue = persistentEntity.getIdentifierAccessor(value).getIdentifier(); + + if (idProperty.hasExplicitWriteTarget() + && conversionService.canConvert(idValue.getClass(), idProperty.getFieldType())) { + return () -> conversionService.convert(idValue, idProperty.getFieldType()); + } + + return () -> idValue; } MongoPersistentEntity valueEntity = mappingContext.getPersistentEntity(value.getClass()); diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 48505559c0..a60c853c33 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -869,15 +869,12 @@ protected List createCollection(Collection collection, MongoPersisten if (!property.isDbReference()) { if (property.isAssociation()) { - return writeCollectionInternal(collection.stream().map(it -> { - if (conversionService.canConvert(it.getClass(), DocumentPointer.class)) { - return conversionService.convert(it, DocumentPointer.class).getPointer(); - } else { - // just take the id as a reference - return mappingContext.getPersistentEntity(property.getAssociationTargetType()).getIdentifierAccessor(it) - .getIdentifier(); - } - }).collect(Collectors.toList()), ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>()); + + List targetCollection = collection.stream().map(it -> { + return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()).getPointer(); + }).collect(Collectors.toList()); + + return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>()); } if (property.hasExplicitWriteTarget()) { @@ -930,13 +927,7 @@ protected Bson createMap(Map map, MongoPersistentProperty proper if (property.isDbReference()) { document.put(simpleKey, value != null ? 
createDBRef(value, property) : null); } else { - if (conversionService.canConvert(value.getClass(), DocumentPointer.class)) { - document.put(simpleKey, conversionService.convert(value, DocumentPointer.class).getPointer()); - } else { - // just take the id as a reference - document.put(simpleKey, mappingContext.getPersistentEntity(property.getAssociationTargetType()) - .getIdentifierAccessor(value).getIdentifier()); - } + document.put(simpleKey, documentPointerFactory.computePointer(mappingContext, property, value, property.getActualType()).getPointer()); } } else { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index fa1deb4f1c..d6bcc10e49 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -32,6 +32,7 @@ import java.util.Map; import org.bson.Document; +import org.bson.types.ObjectId; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -44,6 +45,8 @@ import org.springframework.data.mongodb.core.mapping.DocumentPointer; import org.springframework.data.mongodb.core.mapping.DocumentReference; import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.FieldType; +import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.test.util.Client; import org.springframework.data.mongodb.test.util.MongoClientExtension; @@ -106,6 +109,26 @@ void writeSimpleTypeReference() { assertThat(target.get("simpleValueRef")).isEqualTo("ref-1"); } + @Test // GH-3782 + void 
writeTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot source = new SingleRefRoot(); + source.id = "root-1"; + source.customIdTargetRef = new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), + "me-the-referenced-object"); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRef")).isEqualTo(expectedIdValue); + } + @Test // GH-3602 void writeMapTypeReference() { @@ -126,6 +149,26 @@ void writeMapTypeReference() { assertThat(target.get("mapValueRef", Map.class)).containsEntry("frodo", "ref-1").containsEntry("bilbo", "ref-2"); } + @Test // GH-3782 + void writeMapOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefMap = Collections.singletonMap("frodo", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefMap", Map.class)).containsEntry("frodo", expectedIdValue); + } + @Test // GH-3602 void writeCollectionOfSimpleTypeReference() { @@ -145,6 +188,26 @@ void writeCollectionOfSimpleTypeReference() { assertThat(target.get("simpleValueRef", List.class)).containsExactly("ref-1", "ref-2"); } + @Test // GH-3782 + void writeListOfTypeReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = 
template.getCollectionName(CollectionRefRoot.class); + + CollectionRefRoot source = new CollectionRefRoot(); + source.id = "root-1"; + source.customIdTargetRefList = Collections.singletonList( + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "me-the-referenced-object")); + + template.save(source); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target.get("customIdTargetRefList", List.class)).containsExactly(expectedIdValue); + } + @Test // GH-3602 void writeObjectTypeReference() { @@ -739,6 +802,26 @@ void updateReferenceWithValue() { assertThat(target).containsEntry("toB", "b"); } + @Test // GH-3782 + void updateReferenceHavingCustomizedIdTargetType() { + + ObjectId expectedIdValue = new ObjectId(); + String rootCollectionName = template.getCollectionName(SingleRefRoot.class); + + SingleRefRoot root = new SingleRefRoot(); + root.id = "root-1"; + template.save(root); + + template.update(SingleRefRoot.class).apply(new Update().set("customIdTargetRef", + new ObjectRefHavingCustomizedIdTargetType(expectedIdValue.toString(), "b"))).first(); + + Document target = template.execute(db -> { + return db.getCollection(rootCollectionName).find(Filters.eq("_id", "root-1")).first(); + }); + + assertThat(target).containsEntry("customIdTargetRef", expectedIdValue); + } + @Test // GH-3602 void updateReferenceCollectionWithEntity() { @@ -998,6 +1081,8 @@ static class SingleRefRoot { @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }", lazy = true) // ObjectRefOnNonIdField lazyObjectValueRefOnNonIdFields; + + @DocumentReference ObjectRefHavingCustomizedIdTargetType customIdTargetRef; } @Data @@ -1027,6 +1112,10 @@ static class CollectionRefRoot { @DocumentReference(lookup = "{ 'refKey1' : '?#{refKey1}', 'refKey2' : '?#{refKey2}' }") // List objectValueRefOnNonIdFields; + + @DocumentReference List 
customIdTargetRefList; + + @DocumentReference Map customIdTargetRefMap; } @FunctionalInterface @@ -1094,6 +1183,14 @@ public Object toReference() { } } + @Data + @AllArgsConstructor + static class ObjectRefHavingCustomizedIdTargetType { + + @MongoId(targetType = FieldType.OBJECT_ID) String id; + String name; + } + static class ReferencableConverter implements Converter> { @Nullable @@ -1196,5 +1293,4 @@ static class UsingAtReference { @Reference // Publisher publisher; } - } From f24e8e5361bf02484e20dc7799bc2a13b808873e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 1 Sep 2021 10:39:36 +0200 Subject: [PATCH 114/983] Avoid nested Document conversion to primitive types for fields with an explicit write target. We now no longer attempt to convert query Documents into primitive types to avoid e.g. Document to String conversion. Closes: #3783 Original Pull Request: #3797 --- .../mongodb/core/convert/QueryMapper.java | 3 +- .../core/convert/QueryMapperUnitTests.java | 31 +++++++++++++++++-- 2 files changed, 31 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 7a14f07c4c..e7deb38231 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -778,7 +778,8 @@ protected boolean isKeyword(String candidate) { @Nullable private Object applyFieldTargetTypeHintToValue(Field documentField, @Nullable Object value) { - if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget()) { + if (value == null || documentField.getProperty() == null || !documentField.getProperty().hasExplicitWriteTarget() + || value instanceof Document || value instanceof DBObject) { return value; } diff 
--git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index efd354b866..808263697a 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -33,8 +33,7 @@ import org.bson.types.ObjectId; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.junit.jupiter.MockitoExtension; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.annotation.Transient; @@ -83,9 +82,12 @@ public class QueryMapperUnitTests { @BeforeEach void beforeEach() { + MongoCustomConversions conversions = new MongoCustomConversions(); this.context = new MongoMappingContext(); + this.context.setSimpleTypeHolder(conversions.getSimpleTypeHolder()); this.converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, context); + this.converter.setCustomConversions(conversions); this.converter.afterPropertiesSet(); this.mapper = new QueryMapper(converter); @@ -1335,6 +1337,25 @@ void mapStringIdFieldProjection() { assertThat(mappedFields).containsEntry("_id", 1); } + @Test // GH-3783 + void retainsId$InWithStringArray() { + + org.bson.Document mappedQuery = mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: [\"5b8bedceb1e0bfc07b008828\"]}}"), + context.getPersistentEntity(WithExplicitStringId.class)); + assertThat(mappedQuery.get("_id")).isEqualTo(org.bson.Document.parse("{ $in: [\"5b8bedceb1e0bfc07b008828\"]}")); + } + + @Test // GH-3783 + void mapsId$InInToObjectIds() { + + org.bson.Document mappedQuery = mapper.getMappedObject( + org.bson.Document.parse("{ _id : { $in: 
[\"5b8bedceb1e0bfc07b008828\"]}}"), + context.getPersistentEntity(ClassWithDefaultId.class)); + assertThat(mappedQuery.get("_id")) + .isEqualTo(org.bson.Document.parse("{ $in: [ {$oid: \"5b8bedceb1e0bfc07b008828\" } ]}")); + } + class WithDeepArrayNesting { List level0; @@ -1404,6 +1425,12 @@ class WithStringId { String name; } + class WithExplicitStringId { + + @MongoId(FieldType.STRING) String id; + String name; + } + class BigIntegerId { @Id private BigInteger id; From e71ec874ab69ecc3cfd199be5ce9cda76686913e Mon Sep 17 00:00:00 2001 From: divyajnu08 Date: Sun, 29 Aug 2021 16:41:52 +0530 Subject: [PATCH 115/983] Add support for `$expr` operator. Also, allow construction of $match with an AggregationExpression. Closes #3790 --- .../core/aggregation/AddFieldsOperation.java | 1 + .../mongodb/core/aggregation/Aggregation.java | 12 +- .../core/aggregation/EvaluationOperators.java | 109 ++++++++++++++++++ .../core/aggregation/MatchOperation.java | 39 ++++++- .../aggregation/ReplaceRootOperation.java | 1 + .../core/aggregation/SetOperation.java | 1 + .../aggregation/MatchOperationUnitTests.java | 26 +++++ .../aggregation/SetOperationUnitTests.java | 1 + 8 files changed, 187 insertions(+), 3 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java index 3f3dd125d1..90cc828591 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java @@ -201,4 +201,5 @@ 
public interface ValueAppender { AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values); } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index cecc8f2554..55964bab93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -498,7 +498,17 @@ public static MatchOperation match(Criteria criteria) { public static MatchOperation match(CriteriaDefinition criteria) { return new MatchOperation(criteria); } - + + /** + * Creates a new {@link MatchOperation} + * + * @return new instance of {@link MatchOperation}. + * @since 1.10 + */ + public static MatchOperation match() { + return new MatchOperation(); + } + /** * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The * {@code distanceField} defines output field that contains the calculated distance. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java new file mode 100644 index 0000000000..0fb8e25fab --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java @@ -0,0 +1,109 @@ +package org.springframework.data.mongodb.core.aggregation; + +import org.springframework.util.Assert; + +public class EvaluationOperators { + + /** + * Take the value resulting from the given fieldReference. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. 
+ */ + public static EvaluationOperatorFactory valueOf(String fieldReference) { + return new EvaluationOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link EvaluationOperatorFactory}. + */ + public static EvaluationOperatorFactory valueOf(AggregationExpression expression) { + return new EvaluationOperatorFactory(expression); + } + + public static class EvaluationOperatorFactory { + + private final String fieldReference; + private final AggregationExpression expression; + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + */ + public EvaluationOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + this.fieldReference = fieldReference; + this.expression = null; + } + + + /** + * Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public EvaluationOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link AggregationExpression} that is a valid aggregation expression. + * + * @return new instance of {@link Expr}. + */ + public Expr expr() { + return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression); + } + + + public static class Expr extends AbstractAggregationExpression { + + private Expr(Object value) { + super(value); + } + + @Override + protected String getMongoMethod() { + return "$expr"; + } + + /** + * Creates new {@link Expr}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Expr}. 
+ */ + public static Expr valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new Expr(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Expr}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Expr}. + */ + public static Expr valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return new Expr(expression); + } + + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java index c9d83ae6c8..c2796aaa03 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java @@ -16,6 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.EvaluationOperatorFactory.Expr; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; @@ -36,7 +37,16 @@ public class MatchOperation implements AggregationOperation { private final CriteriaDefinition criteriaDefinition; - + private final AggregationExpression expression; + + /** + * Creates a new {@link MatchOperation} + */ + public MatchOperation() { + this.criteriaDefinition = null; + this.expression = null; + } + /** * Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}. 
* @@ -46,14 +56,39 @@ public MatchOperation(CriteriaDefinition criteriaDefinition) { Assert.notNull(criteriaDefinition, "Criteria must not be null!"); this.criteriaDefinition = criteriaDefinition; + this.expression = null; } - + + /** + * Creates a new {@link MatchOperation} for the given {@link Expression}. + * + * @param criteriaDefinition must not be {@literal null}. + */ + private MatchOperation(Expr expression) { + Assert.notNull(expression, "Expression must not be null!"); + this.criteriaDefinition = null; + this.expression = expression; + } + + /** + * Creates a new {@link MatchOperation} for the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public MatchOperation withValueOf(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); + return new MatchOperation(EvaluationOperators.valueOf(expression).expr()); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) */ @Override public Document toDocument(AggregationOperationContext context) { + if(expression != null) { + return new Document(getOperator(), expression.toDocument()); + } return new Document(getOperator(), context.getMappedObject(criteriaDefinition.getCriteriaObject())); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java index c452ffb8ea..94f9785595 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperation.java @@ -21,6 +21,7 @@ import java.util.List; import org.bson.Document; + import 
org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.expression.spel.ast.Projection; import org.springframework.util.Assert; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java index 731668ed3c..d065f81662 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/SetOperation.java @@ -193,5 +193,6 @@ public interface ValueAppender { */ SetOperation withValueOfExpression(String operation, Object... values); } + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java new file mode 100644 index 0000000000..04d3824de1 --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java @@ -0,0 +1,26 @@ +package org.springframework.data.mongodb.core.aggregation; + +import static org.assertj.core.api.Assertions.*; + +import org.bson.Document; +import org.junit.jupiter.api.Test; + +class MatchOperationUnitTests { + + @Test // DATAMONGO - 3729 + public void shouldRenderStdDevPopCorrectly() { + MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevPop()); + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). 
+ isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevPop\" : \"$quiz\" } } } ")); + + } + + @Test // DATAMONGO - 3729 + public void shouldRenderStdDevSampCorrectly() { + MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevSamp()); + assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). + isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevSamp\" : \"$quiz\" } } } ")); + + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java index b90b049da1..8fd8bd5526 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SetOperationUnitTests.java @@ -21,6 +21,7 @@ import org.bson.Document; import org.junit.jupiter.api.Test; + import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; import org.springframework.data.mongodb.core.convert.QueryMapper; From 34d66a276ac35a24f076d565543803e4392c5880 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 6 Sep 2021 15:07:02 +0200 Subject: [PATCH 116/983] Polishing. Add license headers. Update Javadoc, author, and since tags. Add tests. Add toCriteriaDefinition method. 
See #3790 --- .../mongodb/core/aggregation/Aggregation.java | 13 ++-- .../core/aggregation/EvaluationOperators.java | 66 ++++++++++++++++--- .../core/aggregation/MatchOperation.java | 44 +++++-------- .../EvaluationOperatorsUnitTests.java | 35 ++++++++++ .../aggregation/MatchOperationUnitTests.java | 29 ++++---- .../ReplaceRootOperationUnitTests.java | 16 ++--- .../ReplaceWithOperationUnitTests.java | 8 +-- .../core/convert/QueryMapperUnitTests.java | 18 +++++ 8 files changed, 156 insertions(+), 73 deletions(-) create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 55964bab93..614489692c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -498,17 +498,18 @@ public static MatchOperation match(Criteria criteria) { public static MatchOperation match(CriteriaDefinition criteria) { return new MatchOperation(criteria); } - + /** - * Creates a new {@link MatchOperation} + * Creates a new {@link MatchOperation} using the given {@link AggregationExpression}. * + * @param expression must not be {@literal null}. * @return new instance of {@link MatchOperation}. - * @since 1.10 + * @since 3.3 */ - public static MatchOperation match() { - return new MatchOperation(); + public static MatchOperation match(AggregationExpression expression) { + return new MatchOperation(expression); } - + /** * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The * {@code distanceField} defines output field that contains the calculated distance. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java index 0fb8e25fab..181bab5ef5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperators.java @@ -1,9 +1,33 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; + +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; +/** + * Gateway to {@literal evaluation operators} such as {@literal $expr}. + * + * @author Divya Srivastava + * @since 3.3 + */ public class EvaluationOperators { - + /** * Take the value resulting from the given fieldReference. * @@ -13,7 +37,7 @@ public class EvaluationOperators { public static EvaluationOperatorFactory valueOf(String fieldReference) { return new EvaluationOperatorFactory(fieldReference); } - + /** * Take the value resulting from the given {@link AggregationExpression}. 
* @@ -23,12 +47,12 @@ public static EvaluationOperatorFactory valueOf(String fieldReference) { public static EvaluationOperatorFactory valueOf(AggregationExpression expression) { return new EvaluationOperatorFactory(expression); } - + public static class EvaluationOperatorFactory { - + private final String fieldReference; private final AggregationExpression expression; - + /** * Creates new {@link EvaluationOperatorFactory} for given {@literal fieldReference}. * @@ -41,7 +65,6 @@ public EvaluationOperatorFactory(String fieldReference) { this.expression = null; } - /** * Creates new {@link EvaluationOperatorFactory} for given {@link AggregationExpression}. * @@ -53,7 +76,7 @@ public EvaluationOperatorFactory(AggregationExpression expression) { this.fieldReference = null; this.expression = expression; } - + /** * Creates new {@link AggregationExpression} that is a valid aggregation expression. * @@ -62,8 +85,10 @@ public EvaluationOperatorFactory(AggregationExpression expression) { public Expr expr() { return usesFieldRef() ? Expr.valueOf(fieldReference) : Expr.valueOf(expression); } - - + + /** + * Allows the use of aggregation expressions within the query language. + */ public static class Expr extends AbstractAggregationExpression { private Expr(Object value) { @@ -99,8 +124,29 @@ public static Expr valueOf(AggregationExpression expression) { return new Expr(expression); } + /** + * Creates {@code $expr} as {@link CriteriaDefinition}. + * + * @return the {@link CriteriaDefinition} from this expression. 
+ */ + public CriteriaDefinition toCriteriaDefinition(AggregationOperationContext context) { + + Document criteriaObject = toDocument(context); + + return new CriteriaDefinition() { + @Override + public Document getCriteriaObject() { + return criteriaObject; + } + + @Override + public String getKey() { + return getMongoMethod(); + } + }; + } } - + private boolean usesFieldRef() { return fieldReference != null; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java index c2796aaa03..c3d1f366ec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/MatchOperation.java @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; -import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.EvaluationOperatorFactory.Expr; + import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.util.Assert; @@ -30,6 +30,7 @@ * @author Sebastian Herold * @author Thomas Darimont * @author Oliver Gierke + * @author Divya Srivastava * @since 1.3 * @see MongoDB Aggregation Framework: * $match @@ -38,15 +39,7 @@ public class MatchOperation implements AggregationOperation { private final CriteriaDefinition criteriaDefinition; private final AggregationExpression expression; - - /** - * Creates a new {@link MatchOperation} - */ - public MatchOperation() { - this.criteriaDefinition = null; - this.expression = null; - } - + /** * Creates a new {@link MatchOperation} for the given {@link CriteriaDefinition}. 
* @@ -55,41 +48,34 @@ public MatchOperation() { public MatchOperation(CriteriaDefinition criteriaDefinition) { Assert.notNull(criteriaDefinition, "Criteria must not be null!"); + this.criteriaDefinition = criteriaDefinition; this.expression = null; } - - /** - * Creates a new {@link MatchOperation} for the given {@link Expression}. - * - * @param criteriaDefinition must not be {@literal null}. - */ - private MatchOperation(Expr expression) { - Assert.notNull(expression, "Expression must not be null!"); - this.criteriaDefinition = null; - this.expression = expression; - } - + /** * Creates a new {@link MatchOperation} for the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. + * @since 3.3 */ - public MatchOperation withValueOf(AggregationExpression expression) { + public MatchOperation(AggregationExpression expression) { + Assert.notNull(expression, "Expression must not be null!"); - return new MatchOperation(EvaluationOperators.valueOf(expression).expr()); + + this.criteriaDefinition = null; + this.expression = expression; } - + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) */ @Override public Document toDocument(AggregationOperationContext context) { - if(expression != null) { - return new Document(getOperator(), expression.toDocument()); - } - return new Document(getOperator(), context.getMappedObject(criteriaDefinition.getCriteriaObject())); + + return new Document(getOperator(), + context.getMappedObject(expression != null ? 
expression.toDocument() : criteriaDefinition.getCriteriaObject())); } /* diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java new file mode 100644 index 0000000000..67f5093b8f --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/EvaluationOperatorsUnitTests.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import static org.springframework.data.mongodb.test.util.Assertions.*; + +import org.junit.jupiter.api.Test; + +/** + * Unit tests for {@link EvaluationOperators}. 
+ * + * @author Mark Paluch + */ +class EvaluationOperatorsUnitTests { + + @Test // GH-3790 + void shouldRenderExprCorrectly() { + + assertThat(EvaluationOperators.valueOf("foo").expr().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $expr: \"$foo\" }"); + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java index 04d3824de1..ec3decb7a8 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/MatchOperationUnitTests.java @@ -1,26 +1,23 @@ package org.springframework.data.mongodb.core.aggregation; -import static org.assertj.core.api.Assertions.*; -import org.bson.Document; +import static org.springframework.data.mongodb.test.util.Assertions.*; + import org.junit.jupiter.api.Test; +/** + * Unit tests for {@link MatchOperation}. + * + * @author Divya Srivastava + */ class MatchOperationUnitTests { - - @Test // DATAMONGO - 3729 - public void shouldRenderStdDevPopCorrectly() { - MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevPop()); - assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). - isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevPop\" : \"$quiz\" } } } ")); - - } - - @Test // DATAMONGO - 3729 - public void shouldRenderStdDevSampCorrectly() { - MatchOperation operation = Aggregation.match().withValueOf(ArithmeticOperators.valueOf("quiz").stdDevSamp()); + + @Test // GH-3790 + void matchShouldRenderCorrectly() { + + MatchOperation operation = Aggregation.match(ArithmeticOperators.valueOf("quiz").stdDevPop()); assertThat(operation.toDocument(Aggregation.DEFAULT_CONTEXT)). 
- isEqualTo(Document.parse("{ $match: { \"$expr\" : { \"$stdDevSamp\" : \"$quiz\" } } } ")); - + isEqualTo("{ $match: { \"$stdDevPop\" : \"$quiz\" } } "); } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java index e97e1ff018..9fbc36586f 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceRootOperationUnitTests.java @@ -27,20 +27,20 @@ * * @author Mark Paluch */ -public class ReplaceRootOperationUnitTests { +class ReplaceRootOperationUnitTests { @Test // DATAMONGO-1550 - public void rejectsNullField() { + void rejectsNullField() { assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((Field) null)); } @Test // DATAMONGO-1550 - public void rejectsNullExpression() { + void rejectsNullExpression() { assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceRootOperation((AggregationExpression) null)); } @Test // DATAMONGO-1550 - public void shouldRenderCorrectly() { + void shouldRenderCorrectly() { ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder() .withDocument(new Document("hello", "world")); @@ -50,7 +50,7 @@ public void shouldRenderCorrectly() { } @Test // DATAMONGO-1550 - public void shouldRenderExpressionCorrectly() { + void shouldRenderExpressionCorrectly() { ReplaceRootOperation operation = new ReplaceRootOperation(VariableOperators // .mapItemsOf("array") // @@ -64,7 +64,7 @@ public void shouldRenderExpressionCorrectly() { } @Test // DATAMONGO-1550 - public void shouldComposeDocument() { + void shouldComposeDocument() { ReplaceRootOperation operation = ReplaceRootDocumentOperation.builder().withDocument() // 
.andValue("value").as("key") // @@ -77,7 +77,7 @@ public void shouldComposeDocument() { } @Test // DATAMONGO-1550 - public void shouldComposeSubDocument() { + void shouldComposeSubDocument() { Document partialReplacement = new Document("key", "override").append("key2", "value2"); @@ -92,7 +92,7 @@ public void shouldComposeSubDocument() { } @Test // DATAMONGO-1550 - public void shouldNotExposeFields() { + void shouldNotExposeFields() { ReplaceRootOperation operation = new ReplaceRootOperation(Fields.field("field")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java index 8f8b5c9dd1..d1a21a254c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ReplaceWithOperationUnitTests.java @@ -25,15 +25,15 @@ * * @author Christoph Strobl */ -public class ReplaceWithOperationUnitTests { +class ReplaceWithOperationUnitTests { @Test // DATAMONGO-2331 - public void rejectsNullField() { + void rejectsNullField() { assertThatIllegalArgumentException().isThrownBy(() -> new ReplaceWithOperation(null)); } @Test // DATAMONGO-2331 - public void shouldRenderValueCorrectly() { + void shouldRenderValueCorrectly() { ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValue(new Document("hello", "world")); Document dbObject = operation.toDocument(Aggregation.DEFAULT_CONTEXT); @@ -42,7 +42,7 @@ public void shouldRenderValueCorrectly() { } @Test // DATAMONGO-2331 - public void shouldRenderExpressionCorrectly() { + void shouldRenderExpressionCorrectly() { ReplaceWithOperation operation = ReplaceWithOperation.replaceWithValueOf(VariableOperators // .mapItemsOf("array") // diff --git 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 808263697a..46db6e7d6a 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -43,6 +43,9 @@ import org.springframework.data.geo.Point; import org.springframework.data.mongodb.core.DocumentTestUtils; import org.springframework.data.mongodb.core.Person; +import org.springframework.data.mongodb.core.aggregation.ConditionalOperators; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.geo.GeoJsonPoint; import org.springframework.data.mongodb.core.geo.GeoJsonPolygon; import org.springframework.data.mongodb.core.mapping.DBRef; @@ -1330,6 +1333,21 @@ void allowsUsingFieldPathsForPropertiesHavingCustomConversionRegistered() { assertThat(mapper.getMappedSort(query.getQueryObject(), context.getPersistentEntity(Customer.class))).isEqualTo(new org.bson.Document("address.street", "1007 Mountain Drive")); } + @Test // GH-3790 + void shouldAcceptExprAsCriteriaDefinition() { + + EvaluationOperators.EvaluationOperatorFactory.Expr expr = EvaluationOperators + .valueOf(ConditionalOperators.ifNull("customizedField").then(true)).expr(); + + Query query = query( + expr.toCriteriaDefinition(new TypeBasedAggregationOperationContext(EmbeddedClass.class, context, mapper))); + + org.bson.Document mappedQuery = mapper.getMappedObject(query.getQueryObject(), + context.getRequiredPersistentEntity(EmbeddedClass.class)); + + assertThat(mappedQuery).isEqualTo("{ $expr : { $ifNull : [\"$fancy_custom_name\", true] } }"); + } + @Test // 
GH-3668 void mapStringIdFieldProjection() { From ffceed8da96bb2b83206a87440bebc8d30687c10 Mon Sep 17 00:00:00 2001 From: divya srivastava Date: Sun, 29 Aug 2021 19:07:14 +0530 Subject: [PATCH 117/983] Add support for `$atan`, `$atan2` and `$atanh` aggregation operators. Closes #3709 Original pull request: #3794. --- .../core/aggregation/ArithmeticOperators.java | 258 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 3 + .../data/mongodb/util/RegexFlags.java | 2 +- .../ArithmeticOperatorsUnitTests.java | 22 ++ .../SpelExpressionTransformerUnitTests.java | 15 + 5 files changed, 299 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 8fe3d9120c..bf10488f99 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -790,6 +790,68 @@ public Cosh cosh(AngularUnit unit) { public Tan tan() { return tan(AngularUnit.RADIANS); } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. + * + * @return new instance of {@link ATan}. + */ + public ATan atan() { + return usesFieldRef() ? ATan.atanOf(fieldReference) : ATan.atanOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value + * divided by the given numeric value in the argument. + * + * @param the numeric value + * @return new instance of {@link ATan2}. 
+ */ + public ATan2 atan2(Number value) { + + Assert.notNull(value, "Value must not be null!"); + return createATan2().atan2of(value); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value + * divided by the given field reference in the argument. + * + * @param the numeric value + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return createATan2().atan2of(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value + * divided by the given {@link AggregationExpression} in the argument. + * + * @param the numeric value + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return createATan2().atan2of(expression); + } + + private ATan2 createATan2() { + + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value. + * + * @return new instance of {@link ATanh}. + */ + public ATanh atanh() { + return usesFieldRef() ? ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); + } /** * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given @@ -2579,6 +2641,148 @@ protected String getMongoMethod() { return "$tan"; } } + + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. + * + */ + public static class ATan extends AbstractAggregationExpression { + + private ATan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. 
+ * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ATan(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + *

        + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(AggregationExpression expression) { + return new ATan(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanof(Number value) { + return new ATan(value); + } + + @Override + protected String getMongoMethod() { + return "$atan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse + * tangent of y / x, where y and x are the first and second values passed to the + * expression respectively. + * + */ + public static class ATan2 extends AbstractAggregationExpression { + + private ATan2(List value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ATan2(asFields(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves + * to a numeric value. + * @return new instance of {@link ATan2}. 
+ */ + public static ATan2 valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return new ATan2((Collections.singletonList(expression))); + } + + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param value anything ({@link Field field}, {@link AggregationExpression + * expression}, ...) that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ATan2(append(Fields.field(fieldReference))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null!"); + return new ATan2(append(expression)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * tangent of of y / x, where y and x are the first and second values passed to + * the expression respectively. + * + * @param value of type {@link Number} + * @return new instance of {@link ATan2}. 
+ */ + public ATan2 atan2of(Number value) { + + return new ATan2(append(value)); + } + + @Override + protected String getMongoMethod() { + return "$atan2"; + } + } /** * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in @@ -2684,6 +2888,60 @@ protected String getMongoMethod() { return "$tanh"; } } + + /** + * An {@link AggregationExpression expression} that calculates the inverse + * hyperbolic tangent of a value + * + */ + public static class ATanh extends AbstractAggregationExpression { + + private ATanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a + * numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(String fieldReference) { + return new ATanh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + *

        + * + * @param expression the {@link AggregationExpression expression} that resolves + * to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(AggregationExpression expression) { + return new ATanh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse + * hyperbolic tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression + * expression}, ...) that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhof(Object value) { + return new ATanh(value); + } + + @Override + protected String getMongoMethod() { + return "$atanh"; + } + } /** * {@link Rand} returns a floating value between 0 and 1. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 0fbfe51f09..0f27c463e2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -100,6 +100,9 @@ public class MethodReferenceNode extends ExpressionNode { map.put("tan", singleArgRef().forOperator("$tan")); map.put("tanh", singleArgRef().forOperator("$tanh")); map.put("rand", emptyRef().forOperator("$rand")); + map.put("atan", singleArgRef().forOperator("$atan")); + map.put("atan2", arrayArgRef().forOperator("$atan2")); + map.put("atanh", singleArgRef().forOperator("$atanh")); // STRING OPERATORS map.put("concat", arrayArgRef().forOperator("$concat")); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java index dfee94954c..ba6531e93c 100644 --- 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/RegexFlags.java @@ -113,4 +113,4 @@ public static int toRegexFlag(char c) { return flag; } -} +} \ No newline at end of file diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 02f76d5c10..8a52a8a2f5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -166,6 +166,28 @@ void rendersTanhWithValueInDegrees() { assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } + + @Test // DATAMONGO - 3709 + void rendersATan() { + + assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atan : \"$field\" }"); + } + + @Test // DATAMONGO - 3709 + void rendersATan2() { + + assertThat(valueOf("field1").atan2("field2").toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); + } + + @Test // DATAMONGO - 3709 + void rendersATanh() { + + assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $atanh : \"$field\" }"); + } + @Test // GH-3724 void rendersRand() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index e92ea38336..c9ba9c12e7 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1098,6 +1098,21 @@ void shouldRenderTan() { void shouldRenderTanh() { assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } + + @Test // DATAMONGO - 3709 + void shouldRenderATan() { + assertThat(transform("atan(number)")).isEqualTo("{ \"$atan\" : \"$number\"}"); + } + + @Test // DATAMONGO - 3709 + void shouldRenderATan2() { + assertThat(transform("atan2(number1,number2)")).isEqualTo("{ \"$atan2\" : [ \"$number1\" , \"$number2\" ] }"); + } + + @Test // DATAMONGO - 3709 + void shouldRenderATanh() { + assertThat(transform("atanh(number)")).isEqualTo("{ \"$atanh\" : \"$number\"}"); + } @Test // GH-3713 void shouldRenderDateAdd() { From 8af904b81fb190e6b9663629daff76b11ed8028f Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Mon, 6 Sep 2021 15:46:07 +0200 Subject: [PATCH 118/983] Polishing. Add author and since tags. Tweak Javadoc format. See #3709 Original pull request: #3794. 
--- .../core/aggregation/ArithmeticOperators.java | 155 +++++++++--------- .../ArithmeticOperatorsUnitTests.java | 46 +++--- .../reference/aggregation-framework.adoc | 2 +- 3 files changed, 93 insertions(+), 110 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index bf10488f99..d21d985882 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -717,7 +717,7 @@ public Sin sin(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Sinh}. * @since 3.3 */ public Sinh sinh() { @@ -728,7 +728,7 @@ public Sinh sinh() { * Creates new {@link AggregationExpression} that calculates the sine of a numeric value. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Sinh}. * @since 3.3 */ public Sinh sinh(AngularUnit unit) { @@ -739,7 +739,7 @@ public Sinh sinh(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Cos}. * @since 3.3 */ public Cos cos() { @@ -751,7 +751,7 @@ public Cos cos() { * {@link AngularUnit unit}. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Cos}. 
* @since 3.3 */ public Cos cos(AngularUnit unit) { @@ -762,7 +762,7 @@ public Cos cos(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Cosh}. * @since 3.3 */ public Cosh cosh() { @@ -773,7 +773,7 @@ public Cosh cosh() { * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Cosh}. * @since 3.3 */ public Cosh cosh(AngularUnit unit) { @@ -784,70 +784,75 @@ public Cosh cosh(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Tan}. * @since 3.3 */ public Tan tan() { return tan(AngularUnit.RADIANS); } - + /** * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. * * @return new instance of {@link ATan}. + * @since 3.3 */ public ATan atan() { return usesFieldRef() ? ATan.atanOf(fieldReference) : ATan.atanOf(expression); } - + /** - * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value - * divided by the given numeric value in the argument. + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given numeric value in the argument. * - * @param the numeric value + * @param the numeric value * @return new instance of {@link ATan2}. 
+ * @since 3.3 */ public ATan2 atan2(Number value) { - + Assert.notNull(value, "Value must not be null!"); return createATan2().atan2of(value); } - + /** - * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value - * divided by the given field reference in the argument. + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given field reference in the argument. * - * @param the numeric value + * @param the numeric value * @return new instance of {@link ATan2}. + * @since 3.3 */ public ATan2 atan2(String fieldReference) { - + Assert.notNull(fieldReference, "FieldReference must not be null!"); return createATan2().atan2of(fieldReference); } - + /** - * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value - * divided by the given {@link AggregationExpression} in the argument. + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by + * the given {@link AggregationExpression} in the argument. * - * @param the numeric value + * @param the numeric value * @return new instance of {@link ATan2}. + * @since 3.3 */ public ATan2 atan2(AggregationExpression expression) { - + Assert.notNull(expression, "Expression must not be null!"); return createATan2().atan2of(expression); } - + private ATan2 createATan2() { - + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); } - + /** * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value. * * @return new instance of {@link ATanh}. + * @since 3.3 */ public ATanh atanh() { return usesFieldRef() ? ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); @@ -858,7 +863,7 @@ public ATanh atanh() { * {@link AngularUnit unit}. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. 
+ * @return new instance of {@link Tan}. * @since 3.3 */ public Tan tan(AngularUnit unit) { @@ -869,7 +874,7 @@ public Tan tan(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in * {@link AngularUnit#RADIANS radians}. * - * @return new instance of {@link Sin}. + * @return new instance of {@link Tan}. * @since 3.3 */ public Tanh tanh() { @@ -880,7 +885,7 @@ public Tanh tanh() { * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. * * @param unit the unit of measure. - * @return new instance of {@link Sin}. + * @return new instance of {@link Tanh}. * @since 3.3 */ public Tanh tanh(AngularUnit unit) { @@ -2357,8 +2362,6 @@ private Cos(Object value) { * { $cos : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Cos}. */ @@ -2470,8 +2473,6 @@ public static Cosh coshOf(String fieldReference) { * { $cosh : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Cosh}. @@ -2563,8 +2564,6 @@ private Tan(Object value) { * { $tan : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link Tan}. */ @@ -2641,11 +2640,12 @@ protected String getMongoMethod() { return "$tan"; } } - - + /** * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. * + * @author Divya Srivastava + * @since 3.3 */ public static class ATan extends AbstractAggregationExpression { @@ -2660,14 +2660,13 @@ private ATan(Object value) { * @return new instance of {@link ATan}. 
*/ public static ATan atanOf(String fieldReference) { - + Assert.notNull(fieldReference, "FieldReference must not be null!"); return new ATan(Fields.field(fieldReference)); } /** * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. - *

        * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATan}. @@ -2683,7 +2682,7 @@ public static ATan atanOf(AggregationExpression expression) { * numeric value. * @return new instance of {@link ATan}. */ - public static ATan atanof(Number value) { + public static ATan atanOf(Number value) { return new ATan(value); } @@ -2692,26 +2691,25 @@ protected String getMongoMethod() { return "$atan"; } } - + /** - * An {@link AggregationExpression expression} that calculates the inverse - * tangent of y / x, where y and x are the first and second values passed to the - * expression respectively. + * An {@link AggregationExpression expression} that calculates the inverse tangent of y / x, where y and x are the + * first and second values passed to the expression respectively. * + * @author Divya Srivastava + * @since 3.3 */ public static class ATan2 extends AbstractAggregationExpression { - + private ATan2(List value) { super(value); } /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. - * - * @param fieldReference the name of the {@link Field field} that resolves to a - * numeric value. + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @return new instance of {@link ATan2}. */ public static ATan2 valueOf(String fieldReference) { @@ -2721,12 +2719,10 @@ public static ATan2 valueOf(String fieldReference) { } /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. 
- * - * @param expression the {@link AggregationExpression expression} that resolves - * to a numeric value. + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATan2}. */ public static ATan2 valueOf(AggregationExpression expression) { @@ -2737,12 +2733,11 @@ public static ATan2 valueOf(AggregationExpression expression) { /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. - * - * @param value anything ({@link Field field}, {@link AggregationExpression - * expression}, ...) that resolves to a numeric value. + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. * @return new instance of {@link ATan2}. */ public ATan2 atan2of(String fieldReference) { @@ -2750,7 +2745,7 @@ public ATan2 atan2of(String fieldReference) { Assert.notNull(fieldReference, "FieldReference must not be null!"); return new ATan2(append(Fields.field(fieldReference))); } - + /** * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in * {@link AngularUnit#RADIANS}. @@ -2760,21 +2755,20 @@ public ATan2 atan2of(String fieldReference) { * @return new instance of {@link ATan2}. 
 */ public ATan2 atan2of(AggregationExpression expression) { - + Assert.notNull(expression, "Expression must not be null!"); return new ATan2(append(expression)); } - + /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * tangent of of y / x, where y and x are the first and second values passed to - * the expression respectively. - * + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * * @param value of type {@link Number} * @return new instance of {@link ATan2}. */ public ATan2 atan2of(Number value) { - + return new ATan2(append(value)); } @@ -2818,8 +2812,6 @@ public static Tanh tanhOf(String fieldReference) { * { $tanh : { $degreesToRadians : "$angle" } } * * - * . - * * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. * @param unit the unit of measure used by the value of the given field. * @return new instance of {@link Tanh}. @@ -2888,11 +2880,12 @@ protected String getMongoMethod() { return "$tanh"; } } - + /** - * An {@link AggregationExpression expression} that calculates the inverse - * hyperbolic tangent of a value + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic tangent of a value * + * @author Divya Srivastava + * @since 3.3 */ public static class ATanh extends AbstractAggregationExpression { @@ -2913,12 +2906,10 @@ public static ATanh atanhOf(String fieldReference) { } /** - * Creates a new {@link AggregationExpression} that calculates the inverse - * hyperbolic tangent of a value. + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. *

        - * - * @param expression the {@link AggregationExpression expression} that resolves - * to a numeric value. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATanh}. */ public static ATanh atanhOf(AggregationExpression expression) { @@ -2933,7 +2924,7 @@ public static ATanh atanhOf(AggregationExpression expression) { * expression}, ...) that resolves to a numeric value. * @return new instance of {@link ATanh}. */ - public static ATanh atanhof(Object value) { + public static ATanh atanhOf(Object value) { return new ATanh(value); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 8a52a8a2f5..84d228f75e 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -30,6 +30,7 @@ * @author Christoph Strobl * @author Mark Paluch * @author Mushtaq Ahmed + * @author Divya Srivastava */ class ArithmeticOperatorsUnitTests { @@ -86,8 +87,7 @@ void rendersIntegralWithUnit() { @Test // GH-3728 void rendersSin() { - assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $sin : \"$angle\" }"); + assertThat(valueOf("angle").sin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sin : \"$angle\" }"); } @Test // GH-3728 @@ -100,8 +100,7 @@ void rendersSinWithValueInDegrees() { @Test // GH-3728 void rendersSinh() { - assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $sinh : \"$angle\" }"); + assertThat(valueOf("angle").sinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $sinh : \"$angle\" }"); } 
@Test // GH-3728 @@ -114,8 +113,7 @@ void rendersSinhWithValueInDegrees() { @Test // GH-3710 void rendersCos() { - assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $cos : \"$angle\" }"); + assertThat(valueOf("angle").cos().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cos : \"$angle\" }"); } @Test // GH-3710 @@ -128,8 +126,7 @@ void rendersCosWithValueInDegrees() { @Test // GH-3710 void rendersCosh() { - assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $cosh : \"$angle\" }"); + assertThat(valueOf("angle").cosh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $cosh : \"$angle\" }"); } @Test // GH-3710 @@ -142,8 +139,7 @@ void rendersCoshWithValueInDegrees() { @Test // GH-3730 void rendersTan() { - assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $tan : \"$angle\" }"); + assertThat(valueOf("angle").tan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tan : \"$angle\" }"); } @Test // GH-3730 @@ -156,8 +152,7 @@ void rendersTanWithValueInDegrees() { @Test // GH-3730 void rendersTanh() { - assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $tanh : \"$angle\" }"); + assertThat(valueOf("angle").tanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $tanh : \"$angle\" }"); } @Test // GH-3730 @@ -166,28 +161,25 @@ void rendersTanhWithValueInDegrees() { assertThat(valueOf("angle").tanh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $tanh : { $degreesToRadians : \"$angle\" } }"); } - - @Test // DATAMONGO - 3709 + + @Test // GH-3709 void rendersATan() { - - assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $atan : \"$field\" }"); + + assertThat(valueOf("field").atan().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atan : \"$field\" }"); } - - @Test // DATAMONGO - 3709 + + @Test // GH-3709 void 
rendersATan2() { - + assertThat(valueOf("field1").atan2("field2").toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); + .isEqualTo("{ $atan2 : [ \"$field1\" , \"$field2\" ] }"); } - - @Test // DATAMONGO - 3709 + + @Test // GH-3709 void rendersATanh() { - - assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $atanh : \"$field\" }"); - } + assertThat(valueOf("field").atanh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $atanh : \"$field\" }"); + } @Test // GH-3724 void rendersRand() { diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 75ed415096..387a0acf65 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` (+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` +| `abs`, `add` (+++*+++ via `plus`), `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` From 59d0042d13a8af35a84a48f992c190bed00006b0 Mon Sep 17 00:00:00 2001 
From: divyajnu08 Date: Wed, 1 Sep 2021 13:02:43 +0530 Subject: [PATCH 119/983] Add support for `$asin` and `$asinh` aggregation operators. Closes #3708 Original pull request: #3796. --- .../core/aggregation/ArithmeticOperators.java | 116 ++++++++++++++++++ .../core/spel/MethodReferenceNode.java | 2 + .../ArithmeticOperatorsUnitTests.java | 14 +++ .../SpelExpressionTransformerUnitTests.java | 10 ++ 4 files changed, 142 insertions(+) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index d21d985882..d865d57a7d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -735,6 +735,24 @@ public Sinh sinh(AngularUnit unit) { return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); } + /** + * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. + * + * @return new instance of {@link ASin}. + */ + public ASin asin() { + return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. + * + * @return new instance of {@link ASinh}. + */ + public ASinh asinh() { + return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); + } + /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. @@ -2339,6 +2357,104 @@ protected String getMongoMethod() { return "$sinh"; } } + + /** + * An {@link AggregationExpression expression} that calculates the inverse sine of a value. 
+ * + */ + public static class ASin extends AbstractAggregationExpression { + + private ASin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null!"); + return new ASin(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + *

        + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(AggregationExpression expression) { + return new ASin(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(Number value) { + return new ASin(value); + } + + @Override + protected String getMongoMethod() { + return "$asin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + */ + public static class ASinh extends AbstractAggregationExpression { + + private ASinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(String fieldReference) { + return new ASinh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + *

        + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(AggregationExpression expression) { + return new ASinh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(Object value) { + return new ASinh(value); + } + + @Override + protected String getMongoMethod() { + return "$asinh"; + } + } + /** * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java index 0f27c463e2..dc7a3cc982 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/spel/MethodReferenceNode.java @@ -95,6 +95,8 @@ public class MethodReferenceNode extends ExpressionNode { map.put("integral", mapArgRef().forOperator("$integral").mappingParametersTo("input", "unit")); map.put("sin", singleArgRef().forOperator("$sin")); map.put("sinh", singleArgRef().forOperator("$sinh")); + map.put("asin", singleArgRef().forOperator("$asin")); + map.put("asinh", singleArgRef().forOperator("$asinh")); map.put("cos", singleArgRef().forOperator("$cos")); map.put("cosh", singleArgRef().forOperator("$cosh")); map.put("tan", singleArgRef().forOperator("$tan")); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index 84d228f75e..d0f50d2baf 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -109,6 +109,20 @@ void rendersSinhWithValueInDegrees() { assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); } + + @Test // DATAMONGO - 3708 + void rendersASin() { + + assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $asin : \"$field\" }"); + } + + @Test // DATAMONGO - 3708 + void rendersASinh() { + + assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)) + .isEqualTo("{ $asinh : \"$field\" }"); + } @Test // GH-3710 void rendersCos() { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index c9ba9c12e7..8077f604e5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1078,6 +1078,16 @@ void shouldRenderSin() { void shouldRenderSinh() { assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); } + + @Test // DATAMONGO-3708 + void shouldRenderASin() { + assertThat(transform("asin(number)")).isEqualTo("{ \"$asin\" : \"$number\"}"); + } + + @Test // DATAMONGO-3708 + void shouldRenderASinh() { + assertThat(transform("asinh(number)")).isEqualTo("{ \"$asinh\" : 
\"$number\"}"); + } @Test // GH-3710 void shouldRenderCos() { From dcf184888e88f1ae4a205df15b04b4b7d63a0880 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 7 Sep 2021 09:56:18 +0200 Subject: [PATCH 120/983] Polishing. Add since and author tags. Update reference docs. Fix format of ticket references in tests. See #3708 Original pull request: #3796. --- .../core/aggregation/ArithmeticOperators.java | 18 ++++++++++++------ .../ArithmeticOperatorsUnitTests.java | 16 ++++++---------- .../SpelExpressionTransformerUnitTests.java | 12 ++++++------ .../reference/aggregation-framework.adoc | 2 +- 4 files changed, 25 insertions(+), 23 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index d865d57a7d..9c9132e679 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -739,20 +739,22 @@ public Sinh sinh(AngularUnit unit) { * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. * * @return new instance of {@link ASin}. + * @since 3.3 */ public ASin asin() { return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); } - + /** * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. * * @return new instance of {@link ASinh}. + * @since 3.3 */ public ASinh asinh() { return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); } - + /** * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in * {@link AngularUnit#RADIANS radians}. 
@@ -2357,10 +2359,12 @@ protected String getMongoMethod() { return "$sinh"; } } - + /** * An {@link AggregationExpression expression} that calculates the inverse sine of a value. * + * @author Divya Srivastava + * @since 3.3 */ public static class ASin extends AbstractAggregationExpression { @@ -2407,9 +2411,12 @@ protected String getMongoMethod() { return "$asin"; } } - + /** * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + * + * @author Divya Srivastava + * @since 3.3 */ public static class ASinh extends AbstractAggregationExpression { @@ -2430,7 +2437,7 @@ public static ASinh asinhOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. *

        - * + * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ASinh}. */ @@ -2884,7 +2891,6 @@ public ATan2 atan2of(AggregationExpression expression) { * @return new instance of {@link ATan2}. */ public ATan2 atan2of(Number value) { - return new ATan2(append(value)); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java index d0f50d2baf..ab3d1c2400 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperatorsUnitTests.java @@ -109,19 +109,15 @@ void rendersSinhWithValueInDegrees() { assertThat(valueOf("angle").sinh(AngularUnit.DEGREES).toDocument(Aggregation.DEFAULT_CONTEXT)) .isEqualTo("{ $sinh : { $degreesToRadians : \"$angle\" } }"); } - - @Test // DATAMONGO - 3708 - void rendersASin() { - assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $asin : \"$field\" }"); + @Test // GH-3708 + void rendersASin() { + assertThat(valueOf("field").asin().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asin : \"$field\" }"); } - - @Test // DATAMONGO - 3708 - void rendersASinh() { - assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)) - .isEqualTo("{ $asinh : \"$field\" }"); + @Test // GH-3708 + void rendersASinh() { + assertThat(valueOf("field").asinh().toDocument(Aggregation.DEFAULT_CONTEXT)).isEqualTo("{ $asinh : \"$field\" }"); } @Test // GH-3710 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java index 8077f604e5..899e02a172 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/aggregation/SpelExpressionTransformerUnitTests.java @@ -1078,13 +1078,13 @@ void shouldRenderSin() { void shouldRenderSinh() { assertThat(transform("sinh(angle)")).isEqualTo("{ \"$sinh\" : \"$angle\"}"); } - - @Test // DATAMONGO-3708 + + @Test // GH-3708 void shouldRenderASin() { assertThat(transform("asin(number)")).isEqualTo("{ \"$asin\" : \"$number\"}"); } - @Test // DATAMONGO-3708 + @Test // GH-3708 void shouldRenderASinh() { assertThat(transform("asinh(number)")).isEqualTo("{ \"$asinh\" : \"$number\"}"); } @@ -1108,17 +1108,17 @@ void shouldRenderTan() { void shouldRenderTanh() { assertThat(transform("tanh(angle)")).isEqualTo("{ \"$tanh\" : \"$angle\"}"); } - + @Test // DATAMONGO - 3709 void shouldRenderATan() { assertThat(transform("atan(number)")).isEqualTo("{ \"$atan\" : \"$number\"}"); } - + @Test // DATAMONGO - 3709 void shouldRenderATan2() { assertThat(transform("atan2(number1,number2)")).isEqualTo("{ \"$atan2\" : [ \"$number1\" , \"$number2\" ] }"); } - + @Test // DATAMONGO - 3709 void shouldRenderATanh() { assertThat(transform("atanh(number)")).isEqualTo("{ \"$atanh\" : \"$number\"}"); diff --git a/src/main/asciidoc/reference/aggregation-framework.adoc b/src/main/asciidoc/reference/aggregation-framework.adoc index 387a0acf65..45315cda36 100644 --- a/src/main/asciidoc/reference/aggregation-framework.adoc +++ b/src/main/asciidoc/reference/aggregation-framework.adoc @@ -85,7 +85,7 @@ At the time of this writing, we provide support for the following Aggregation Op | `addToSet`, `covariancePop`, `covarianceSamp`, `expMovingAvg`, `first`, `last`, `max`, `min`, `avg`, `push`, `sum`, `count` 
(+++*+++), `stdDevPop`, `stdDevSamp` | Arithmetic Aggregation Operators -| `abs`, `add` (+++*+++ via `plus`), `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` +| `abs`, `add` (+++*+++ via `plus`), `asin`, `asinh`, `atan`, `atan2`, `atanh`, `ceil`, `cos`, `cosh`, `derivative`, `divide`, `exp`, `floor`, `integral`, `ln`, `log`, `log10`, `mod`, `multiply`, `pow`, `round`, `sqrt`, `subtract` (+++*+++ via `minus`), `sin`, `sinh`, `tan`, `tanh`, `trunc` | String Aggregation Operators | `concat`, `substr`, `toLower`, `toUpper`, `strcasecmp`, `indexOfBytes`, `indexOfCP`, `regexFind`, `regexFindAll`, `regexMatch`, `split`, `strLenBytes`, `strLenCP`, `substrCP`, `trim`, `ltrim`, `rtim` From c8307d5a39d246a245db2866a55cee813edd888d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 7 Sep 2021 11:07:27 +0200 Subject: [PATCH 121/983] Allow one-to-many style lookups via `@DocumentReference`. This commit adds support for relational style One-To-Many references using a combination of ReadonlyProperty and @DocumentReference. It allows to link types without explicitly storing the linking values within the document itself. @Document class Publisher { @Id ObjectId id; // ... @ReadOnlyProperty @DocumentReference(lookup="{'publisherId':?#{#self._id} }") List books; } Closes: #3798 Original pull request: #3802. 
--- .../convert/DefaultReferenceResolver.java | 2 +- .../core/convert/DocumentReferenceSource.java | 63 +++++++++++++++++++ .../core/convert/MappingMongoConverter.java | 16 +++-- .../core/convert/ReferenceLookupDelegate.java | 52 +++++++++++---- .../MongoTemplateDocumentReferenceTests.java | 48 ++++++++++++++ .../reference/document-references.adoc | 56 +++++++++++++++++ 6 files changed, 218 insertions(+), 19 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java index f801b8d990..62e713065f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DefaultReferenceResolver.java @@ -108,6 +108,6 @@ private Object createLazyLoadingProxy(MongoPersistentProperty property, Object s ReferenceLookupDelegate referenceLookupDelegate, LookupFunction lookupFunction, MongoEntityReader entityReader) { return proxyFactory.createLazyLoadingProxy(property, it -> { return referenceLookupDelegate.readReference(it, source, lookupFunction, entityReader); - }, source); + }, source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource)source).getTargetSource() : source); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java new file mode 100644 index 0000000000..03e5eb0d5d --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java @@ -0,0 +1,63 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.convert; + +import org.springframework.lang.Nullable; + +/** + * The source object to resolve document references upon. Encapsulates the actual source and the reference specific + * values. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class DocumentReferenceSource { + + private final Object self; + + @Nullable private final Object targetSource; + + /** + * Create a new instance of {@link DocumentReferenceSource}. + * + * @param self the entire wrapper object holding references. Must not be {@literal null}. + * @param targetSource the reference value source. + */ + DocumentReferenceSource(Object self, @Nullable Object targetSource) { + + this.self = self; + this.targetSource = targetSource; + } + + /** + * Get the outer document. + * + * @return never {@literal null}. 
+ */ + public Object getSelf() { + return self; + } + + /** + * Get the actual (property specific) reference value. + * + * @return can be {@literal null}. + */ + @Nullable + public Object getTargetSource() { + return targetSource; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index a60c853c33..5a2c3e952a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -38,7 +38,6 @@ import org.bson.types.ObjectId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; - import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -524,10 +523,6 @@ private void readAssociation(Association association, P MongoPersistentProperty property = association.getInverse(); Object value = documentAccessor.get(property); - if (value == null) { - return; - } - if (property.isDocumentReference() || (!property.isDbReference() && property.findAnnotation(Reference.class) != null)) { @@ -535,17 +530,26 @@ private void readAssociation(Association association, P if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { + if(value == null) { + return; + } + DocumentPointer pointer = () -> value; // collection like special treatment accessor.setProperty(property, conversionService.convert(pointer, property.getActualType())); } else { + accessor.setProperty(property, - dbRefResolver.resolveReference(property, value, referenceLookupDelegate, context::convert)); + dbRefResolver.resolveReference(property, new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), 
referenceLookupDelegate, context::convert)); } return; } + if (value == null) { + return; + } + DBRef dbref = value instanceof DBRef ? (DBRef) value : null; accessor.setProperty(property, dbRefResolver.resolveDbRef(property, dbref, callback, handler)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index 3ca730452f..e16f9024b5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -87,17 +87,20 @@ public ReferenceLookupDelegate( * Read the reference expressed by the given property. * * @param property the reference defining property. Must not be {@literal null}. THe - * @param value the source value identifying to the referenced entity. Must not be {@literal null}. + * @param source the source value identifying to the referenced entity. Must not be {@literal null}. * @param lookupFunction to execute a lookup query. Must not be {@literal null}. * @param entityReader the callback to convert raw source values into actual domain types. Must not be * {@literal null}. * @return can be {@literal null}. */ @Nullable - public Object readReference(MongoPersistentProperty property, Object value, LookupFunction lookupFunction, + public Object readReference(MongoPersistentProperty property, Object source, LookupFunction lookupFunction, MongoEntityReader entityReader) { - DocumentReferenceQuery filter = computeFilter(property, value, spELContext); + Object value = source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource) source).getTargetSource() + : source; + + DocumentReferenceQuery filter = computeFilter(property, source, spELContext); ReferenceCollection referenceCollection = computeReferenceContext(property, value, spELContext); Iterable result = lookupFunction.apply(filter, referenceCollection); @@ -196,8 +199,16 @@ private T parseValueOrGet(String value, ParameterBindingContext bindingConte ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) { - return new ParameterBindingContext(valueProviderFor(source), spELContext.getParser(), + ValueProvider valueProvider; + if (source instanceof DocumentReferenceSource) { + valueProvider = valueProviderFor(((DocumentReferenceSource) source).getTargetSource()); + } else { + valueProvider = valueProviderFor(source); + } + + return new ParameterBindingContext(valueProvider, spELContext.getParser(), () -> evaluationContextFor(property, source, spELContext)); + } ValueProvider valueProviderFor(Object source) { @@ -212,9 +223,18 @@ ValueProvider valueProviderFor(Object source) { EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object source, SpELContext spELContext) { - EvaluationContext ctx = spELContext.getEvaluationContext(source); - ctx.setVariable("target", source); - ctx.setVariable(property.getName(), source); + Object target = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() + : source; + + if (target == null) { + target = new Document(); + } + + EvaluationContext ctx = spELContext.getEvaluationContext(target); + ctx.setVariable("target", target); + ctx.setVariable("self", + source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource) source).getSelf() : source); + ctx.setVariable(property.getName(), target); return ctx; } @@ -223,22 +243,30 @@ EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object * Compute the query to retrieve linked documents. * * @param property must not be {@literal null}. - * @param value must not be {@literal null}. + * @param source must not be {@literal null}. * @param spELContext must not be {@literal null}. * @return never {@literal null}. */ @SuppressWarnings("unchecked") - DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object value, SpELContext spELContext) { + DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object source, SpELContext spELContext) { DocumentReference documentReference = property.isDocumentReference() ? property.getDocumentReference() : ReferenceEmulatingDocumentReference.INSTANCE; String lookup = documentReference.lookup(); - Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, value, spELContext), + Object value = source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource) source).getTargetSource() + : source; + + Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext), () -> new Document()); - if (property.isCollectionLike() && value instanceof Collection) { + if (property.isCollectionLike() && (value instanceof Collection || value == null)) { + + if (value == null) { + return new ListDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), + sort); + } List ors = new ArrayList<>(); for (Object entry : (Collection) value) { @@ -263,7 +291,7 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object va return new MapDocumentReferenceQuery(new Document("$or", filterMap.values()), sort, filterMap); } - return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, value, spELContext)), sort); + return new SingleDocumentReferenceQuery(codec.decode(lookup, bindingContext(property, source, spELContext)), sort); } enum ReferenceEmulatingDocumentReference implements DocumentReference { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index d6bcc10e49..06d288d1f5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -39,6 +39,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; +import org.springframework.data.annotation.ReadOnlyProperty; import org.springframework.data.annotation.Reference; import org.springframework.data.convert.WritingConverter; import 
org.springframework.data.mongodb.core.convert.LazyLoadingTestUtils; @@ -1049,7 +1050,34 @@ void updateWhenUsingAtReferenceDirectly() { }); assertThat(target).containsEntry("publisher", "p-1"); + } + + @Test // GH-3798 + void allowsOneToMayStyleLookupsUsingSelfVariable() { + + OneToManyStyleBook book1 = new OneToManyStyleBook(); + book1.id = "id-1"; + book1.publisherId = "p-100"; + + OneToManyStyleBook book2 = new OneToManyStyleBook(); + book2.id = "id-2"; + book2.publisherId = "p-200"; + + OneToManyStyleBook book3 = new OneToManyStyleBook(); + book3.id = "id-3"; + book3.publisherId = "p-100"; + + template.save(book1); + template.save(book2); + template.save(book3); + OneToManyStylePublisher publisher = new OneToManyStylePublisher(); + publisher.id = "p-100"; + + template.save(publisher); + + OneToManyStylePublisher target = template.findOne(query(where("id").is(publisher.id)), OneToManyStylePublisher.class); + assertThat(target.books).containsExactlyInAnyOrder(book1, book3); } @Data @@ -1293,4 +1321,24 @@ static class UsingAtReference { @Reference // Publisher publisher; } + + @Data + static class OneToManyStyleBook { + + @Id + String id; + + private String publisherId; + } + + @Data + static class OneToManyStylePublisher { + + @Id + String id; + + @ReadOnlyProperty + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") + List books; + } } diff --git a/src/main/asciidoc/reference/document-references.adoc b/src/main/asciidoc/reference/document-references.adoc index 885d2d6ade..23bc025e80 100644 --- a/src/main/asciidoc/reference/document-references.adoc +++ b/src/main/asciidoc/reference/document-references.adoc @@ -262,6 +262,62 @@ class Publisher { <2> The field value placeholders of the lookup query (like `acc`) is used to form the reference document. ==== +It is also possible to model relational style _One-To-Many_ references using a combination of `@ReadonlyProperty` and `@DocumentReference`. 
+This approach allows to link types without explicitly storing the linking values within the document itself as shown in the snipped below. + +==== +[source,java] +---- +@Document +class Book { + + @Id + ObjectId id; + String title; + List author; + + ObjectId publisherId; <1> +} + +@Document +class Publisher { + + @Id + ObjectId id; + String acronym; + String name; + + @ReadOnlyProperty <2> + @DocumentReference(lookup="{'publisherId':?#{#self._id} }") <3> + List books; +} +---- + +.`Book` document +[source,json] +---- +{ + "_id" : 9a48e32, + "title" : "The Warded Man", + "author" : ["Peter V. Brett"], + "publisherId" : 8cfb002 +} +---- + +.`Publisher` document +[source,json] +---- +{ + "_id" : 8cfb002, + "acronym" : "DR", + "name" : "Del Rey" +} +---- +<1> Set up the link from `Book` to `Publisher` by storing the `Publisher.id` within the `Book` document. +<2> Mark the property holding the references to be read only. This prevents storing references to individual ``Book``s with the `Publisher` document. +<3> Use the `#self` variable to access values within the `Publisher` document and in this retrieve `Books` with matching `publisherId`. +==== + With all the above in place it is possible to model all kind of associations between entities. Have a look at the non-exhaustive list of samples below to get feeling for what is possible. From 977e5e4c5c877e17a25de14bb47f98f8fa802161 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 13:50:54 +0200 Subject: [PATCH 122/983] Polishing. Tweak reference documentation wording. Extract self/target source dereferencing into utility methods. See: #3798 Original pull request: #3802. 
--- .../core/convert/DocumentReferenceSource.java | 25 +++++++++++++++++-- .../core/convert/ReferenceLookupDelegate.java | 17 +++---------- .../reference/document-references.adoc | 7 +++--- 3 files changed, 31 insertions(+), 18 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java index 03e5eb0d5d..89d7360e4d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentReferenceSource.java @@ -28,11 +28,11 @@ public class DocumentReferenceSource { private final Object self; - @Nullable private final Object targetSource; + private final @Nullable Object targetSource; /** * Create a new instance of {@link DocumentReferenceSource}. - * + * * @param self the entire wrapper object holding references. Must not be {@literal null}. * @param targetSource the reference value source. */ @@ -60,4 +60,25 @@ public Object getSelf() { public Object getTargetSource() { return targetSource; } + + /** + * Dereference a {@code targetSource} if it is a {@link DocumentReferenceSource} or return {@code source} otherwise. + * + * @param source + * @return + */ + @Nullable + static Object getTargetSource(Object source) { + return source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() : source; + } + + /** + * Dereference a {@code self} object if it is a {@link DocumentReferenceSource} or return {@code self} otherwise. + * + * @param self + * @return + */ + static Object getSelf(Object self) { + return self instanceof DocumentReferenceSource ? 
((DocumentReferenceSource) self).getSelf() : self; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index e16f9024b5..36ccc23a6b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -174,7 +174,6 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop * @param * @return can be {@literal null}. */ - @Nullable @SuppressWarnings("unchecked") private T parseValueOrGet(String value, ParameterBindingContext bindingContext, Supplier defaultValue) { @@ -199,16 +198,10 @@ private T parseValueOrGet(String value, ParameterBindingContext bindingConte ParameterBindingContext bindingContext(MongoPersistentProperty property, Object source, SpELContext spELContext) { - ValueProvider valueProvider; - if (source instanceof DocumentReferenceSource) { - valueProvider = valueProviderFor(((DocumentReferenceSource) source).getTargetSource()); - } else { - valueProvider = valueProviderFor(source); - } + ValueProvider valueProvider = valueProviderFor(DocumentReferenceSource.getTargetSource(source)); return new ParameterBindingContext(valueProvider, spELContext.getParser(), () -> evaluationContextFor(property, source, spELContext)); - } ValueProvider valueProviderFor(Object source) { @@ -232,8 +225,7 @@ EvaluationContext evaluationContextFor(MongoPersistentProperty property, Object EvaluationContext ctx = spELContext.getEvaluationContext(target); ctx.setVariable("target", target); - ctx.setVariable("self", - source instanceof DocumentReferenceSource ? 
((DocumentReferenceSource) source).getSelf() : source); + ctx.setVariable("self", DocumentReferenceSource.getSelf(source)); ctx.setVariable(property.getName(), target); return ctx; @@ -255,11 +247,10 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so String lookup = documentReference.lookup(); - Object value = source instanceof DocumentReferenceSource ? ((DocumentReferenceSource) source).getTargetSource() - : source; + Object value = DocumentReferenceSource.getTargetSource(source); Document sort = parseValueOrGet(documentReference.sort(), bindingContext(property, source, spELContext), - () -> new Document()); + Document::new); if (property.isCollectionLike() && (value instanceof Collection || value == null)) { diff --git a/src/main/asciidoc/reference/document-references.adoc b/src/main/asciidoc/reference/document-references.adoc index 23bc025e80..b7d55678a5 100644 --- a/src/main/asciidoc/reference/document-references.adoc +++ b/src/main/asciidoc/reference/document-references.adoc @@ -263,7 +263,7 @@ class Publisher { ==== It is also possible to model relational style _One-To-Many_ references using a combination of `@ReadonlyProperty` and `@DocumentReference`. -This approach allows to link types without explicitly storing the linking values within the document itself as shown in the snipped below. +This approach allows link types without storing the linking values within the owning document but rather on the referencing document as shown in the example below. ==== [source,java] @@ -313,8 +313,9 @@ class Publisher { "name" : "Del Rey" } ---- -<1> Set up the link from `Book` to `Publisher` by storing the `Publisher.id` within the `Book` document. -<2> Mark the property holding the references to be read only. This prevents storing references to individual ``Book``s with the `Publisher` document. +<1> Set up the link from `Book` (reference) to `Publisher` (owner) by storing the `Publisher.id` within the `Book` document. 
+<2> Mark the property holding the references to be readonly. +This prevents storing references to individual ``Book``s with the `Publisher` document. <3> Use the `#self` variable to access values within the `Publisher` document and in this retrieve `Books` with matching `publisherId`. ==== From ada7e199a4dcbc2cea45d6c0e13d5a9cb8fde7b5 Mon Sep 17 00:00:00 2001 From: Oliver Drotbohm Date: Tue, 7 Sep 2021 14:54:12 +0200 Subject: [PATCH 123/983] Properly detect all supported identifier annotations as explicitly annotated. We now simply delegate to AnnotationBasedPersistentProperty.isIdProperty() for the detection of annotated identifiers. The previous, manual identifier check was preventing additional identifier annotations, supported by ABP, to be considered, too. Fixes #3803. --- spring-data-mongodb/pom.xml | 9 +++++++++ .../mapping/BasicMongoPersistentProperty.java | 3 +-- .../BasicMongoPersistentPropertyUnitTests.java | 15 ++++++++++++++- 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 1f157e75bc..2f73c10eba 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -317,6 +317,15 @@ test + + + + org.jmolecules + jmolecules-ddd + ${jmolecules} + test + + diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 87eb56b732..1315757896 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -22,7 +22,6 @@ import org.bson.types.ObjectId; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.data.annotation.Id; import org.springframework.data.mapping.Association; 
import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.model.AnnotationBasedPersistentProperty; @@ -115,7 +114,7 @@ public boolean isIdProperty() { */ @Override public boolean isExplicitIdProperty() { - return isAnnotationPresent(Id.class); + return super.isIdProperty(); } /** diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index bbcb8dada0..fffa861914 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -28,9 +28,9 @@ import org.bson.Document; import org.bson.types.ObjectId; +import org.jmolecules.ddd.annotation.Identity; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; - import org.springframework.core.annotation.AliasFor; import org.springframework.data.annotation.Id; import org.springframework.data.mapping.MappingException; @@ -241,6 +241,15 @@ void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTy assertThat(property.getFieldType()).isEqualTo(Document.class); } + @Test + void considersJMoleculesIdentityExplicitlyAnnotatedIdentifier() { + + MongoPersistentProperty property = getPropertyFor(WithJMoleculesIdentity.class, "identifier"); + + assertThat(property.isIdProperty()).isTrue(); + assertThat(property.isExplicitIdProperty()).isTrue(); + } + private MongoPersistentProperty getPropertyFor(Field field) { return getPropertyFor(entity, field); } @@ -369,4 +378,8 @@ static class WithComplexId { @Id @org.springframework.data.mongodb.core.mapping.Field ComplexId id; } + + static class WithJMoleculesIdentity { + @Identity ObjectId identifier; + } } From 
cba7eaba4c442c426c3cba15be5d2c7073ebdb16 Mon Sep 17 00:00:00 2001 From: Oliver Drotbohm Date: Tue, 7 Sep 2021 14:54:35 +0200 Subject: [PATCH 124/983] Polishing. Formatting and indentation in parent project's pom.xml. See #3803 --- pom.xml | 8 ++++---- .../mapping/BasicMongoPersistentPropertyUnitTests.java | 3 +-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 5d28c8a5c5..4aa47bbf2b 100644 --- a/pom.xml +++ b/pom.xml @@ -141,11 +141,11 @@ sonatype-libs-snapshot https://oss.sonatype.org/content/repositories/snapshots - false - + false + - true - + true + diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index fffa861914..66ae0199fc 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -356,8 +356,7 @@ static class DocumentWithComposedAnnotations { @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) @Id - static @interface ComposedIdAnnotation { - } + static @interface ComposedIdAnnotation {} static class WithStringMongoId { From 061c28f84ac8c8a302dffc6e3d7264d3703a0f6d Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 13:58:37 +0200 Subject: [PATCH 125/983] Polishing. Add ticket reference to tests. 
See #3803 --- .../core/mapping/BasicMongoPersistentPropertyUnitTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java index 66ae0199fc..d731854a02 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentPropertyUnitTests.java @@ -241,7 +241,7 @@ void fieldTypeShouldBeDocumentForPropertiesAnnotatedIdWhenAComplexTypeAndFieldTy assertThat(property.getFieldType()).isEqualTo(Document.class); } - @Test + @Test // GH-3803 void considersJMoleculesIdentityExplicitlyAnnotatedIdentifier() { MongoPersistentProperty property = getPropertyFor(WithJMoleculesIdentity.class, "identifier"); From 4e960a968288833f6e8ca6a8ce429eef226972a4 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 8 Sep 2021 09:24:15 +0200 Subject: [PATCH 126/983] Fix document reference on empty reference arrays. This commit fixes an issue caused by empty reference arrays. Closes #3805 Original pull request: #3807. 
--- .../core/convert/ReferenceLookupDelegate.java | 9 ++++- .../MongoTemplateDocumentReferenceTests.java | 39 +++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index 36ccc23a6b..a2726e6338 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -19,6 +19,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -122,7 +123,9 @@ private ReferenceCollection computeReferenceContext(MongoPersistentProperty prop // Use the first value as a reference for others in case of collection like if (value instanceof Iterable) { - value = ((Iterable) value).iterator().next(); + + Iterator iterator = ((Iterable) value).iterator(); + value = iterator.hasNext() ? 
iterator.next() : new Document(); } // handle DBRef value @@ -266,6 +269,10 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so ors.add(decoded); } + if(ors.isEmpty()) { + return new ListDocumentReferenceQuery(new Document("_id", new Document("$exists", false)), sort); + } + return new ListDocumentReferenceQuery(new Document("$or", ors), sort); } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index 06d288d1f5..2b96b3dc22 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -25,6 +25,7 @@ import lombok.Setter; import lombok.ToString; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; @@ -679,6 +680,41 @@ void loadCollectionReferenceWithMissingRefs() { assertThat(result.getSimpleValueRef()).containsExactly(new SimpleObjectRef("ref-2", "me-the-2-referenced-object")); } + @Test // GH-3805 + void loadEmptyCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference array. 
+ Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedValueRef", + Collections.emptyList()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + + @Test // GH-3805 + void loadNoExistingCollectionReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // no reference array at all + Document source = new Document("_id", "id-1").append("value", "v1"); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedValueRef).isEmpty(); + } + @Test // GH-3602 void queryForReference() { @@ -1122,6 +1158,9 @@ static class CollectionRefRoot { @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // List simpleValueRef; + @DocumentReference + List simplePreinitializedValueRef = new ArrayList<>(); + @DocumentReference(lookup = "{ '_id' : '?#{#target}' }", sort = "{ '_id' : -1 } ") // List simpleSortedValueRef; From 270456ed81ae3a11d08ec6a3a3bffd8eca9b8d77 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 14:18:17 +0200 Subject: [PATCH 127/983] Polishing. Extract query that yields no hits into constant. Guard Map-typed reference properties against empty $or. See #3805 Original pull request: #3807. 
--- .../core/convert/ReferenceLookupDelegate.java | 25 +++++++++++++------ .../MongoTemplateDocumentReferenceTests.java | 21 ++++++++++++++++ 2 files changed, 38 insertions(+), 8 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java index a2726e6338..dbbdbe99eb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/ReferenceLookupDelegate.java @@ -62,6 +62,8 @@ */ public final class ReferenceLookupDelegate { + private static final Document NO_RESULTS_PREDICATE = new Document("_id", new Document("$exists", false)); + private final MappingContext, MongoPersistentProperty> mappingContext; private final SpELContext spELContext; private final ParameterBindingDocumentCodec codec; @@ -262,15 +264,17 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so sort); } - List ors = new ArrayList<>(); - for (Object entry : (Collection) value) { + Collection objects = (Collection) value; - Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); - ors.add(decoded); + if (objects.isEmpty()) { + return new ListDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort); } - if(ors.isEmpty()) { - return new ListDocumentReferenceQuery(new Document("_id", new Document("$exists", false)), sort); + List ors = new ArrayList<>(objects.size()); + for (Object entry : objects) { + + Document decoded = codec.decode(lookup, bindingContext(property, entry, spELContext)); + ors.add(decoded); } return new ListDocumentReferenceQuery(new Document("$or", ors), sort); @@ -278,9 +282,14 @@ DocumentReferenceQuery computeFilter(MongoPersistentProperty property, Object so if (property.isMap() && value instanceof Map) 
{ - Map filterMap = new LinkedHashMap<>(); + Set> entries = ((Map) value).entrySet(); + if (entries.isEmpty()) { + return new MapDocumentReferenceQuery(NO_RESULTS_PREDICATE, sort, Collections.emptyMap()); + } + + Map filterMap = new LinkedHashMap<>(entries.size()); - for (Entry entry : ((Map) value).entrySet()) { + for (Entry entry : entries) { Document decoded = codec.decode(lookup, bindingContext(property, entry.getValue(), spELContext)); filterMap.put(entry.getKey(), decoded); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index 2b96b3dc22..c63e7a1115 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -698,6 +698,24 @@ void loadEmptyCollectionReference() { assertThat(result.simplePreinitializedValueRef).isEmpty(); } + @Test // GH-3805 + void loadEmptyMapReference() { + + String rootCollectionName = template.getCollectionName(CollectionRefRoot.class); + + // an empty reference array. 
+ Document source = new Document("_id", "id-1").append("value", "v1").append("simplePreinitializedMapRef", + new Document()); + + template.execute(db -> { + db.getCollection(rootCollectionName).insertOne(source); + return null; + }); + + CollectionRefRoot result = template.findOne(query(where("id").is("id-1")), CollectionRefRoot.class); + assertThat(result.simplePreinitializedMapRef).isEmpty(); + } + @Test // GH-3805 void loadNoExistingCollectionReference() { @@ -1167,6 +1185,9 @@ static class CollectionRefRoot { @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // Map mapValueRef; + @DocumentReference // + Map simplePreinitializedMapRef = new LinkedHashMap<>(); + @Field("simple-value-ref-annotated-field-name") // @DocumentReference(lookup = "{ '_id' : '?#{#target}' }") // List simpleValueRefWithAnnotatedFieldName; From f128e6df152bc559bbae6e07592307d3f3fc402d Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 8 Sep 2021 10:29:02 +0200 Subject: [PATCH 128/983] Fix `@DocumentReference` resolution for properties used in constructor. This commit fixes an issue that prevented referenced entities from being used as constructor arguments. Closes: #3806 Original pull request: #3810. 
--- .../core/convert/MappingMongoConverter.java | 25 +++-- .../MongoTemplateDocumentReferenceTests.java | 106 ++++++++++++++++++ 2 files changed, 124 insertions(+), 7 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 5a2c3e952a..07709df365 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -530,7 +530,7 @@ private void readAssociation(Association association, P if (conversionService.canConvert(DocumentPointer.class, property.getActualType())) { - if(value == null) { + if (value == null) { return; } @@ -541,7 +541,9 @@ private void readAssociation(Association association, P } else { accessor.setProperty(property, - dbRefResolver.resolveReference(property, new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), referenceLookupDelegate, context::convert)); + dbRefResolver.resolveReference(property, + new DocumentReferenceSource(documentAccessor.getDocument(), documentAccessor.get(property)), + referenceLookupDelegate, context::convert)); } return; } @@ -875,10 +877,12 @@ protected List createCollection(Collection collection, MongoPersisten if (property.isAssociation()) { List targetCollection = collection.stream().map(it -> { - return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()).getPointer(); + return documentPointerFactory.computePointer(mappingContext, property, it, property.getActualType()) + .getPointer(); }).collect(Collectors.toList()); - return writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class), new ArrayList<>()); + return 
writeCollectionInternal(targetCollection, ClassTypeInformation.from(DocumentPointer.class), + new ArrayList<>()); } if (property.hasExplicitWriteTarget()) { @@ -931,7 +935,8 @@ protected Bson createMap(Map map, MongoPersistentProperty proper if (property.isDbReference()) { document.put(simpleKey, value != null ? createDBRef(value, property) : null); } else { - document.put(simpleKey, documentPointerFactory.computePointer(mappingContext, property, value, property.getActualType()).getPointer()); + document.put(simpleKey, documentPointerFactory + .computePointer(mappingContext, property, value, property.getActualType()).getPointer()); } } else { @@ -1814,6 +1819,11 @@ public T getPropertyValue(MongoPersistentProperty property) { return (T) dbRefResolver.resolveDbRef(property, dbref, callback, dbRefProxyHandler); } + if (property.isDocumentReference()) { + return (T) dbRefResolver.resolveReference(property, accessor.get(property), referenceLookupDelegate, + context::convert); + } + return super.getPropertyValue(property); } } @@ -2036,7 +2046,7 @@ public S convert(Object source, TypeInformation if (typeHint.isMap()) { - if(ClassUtils.isAssignable(Document.class, typeHint.getType())) { + if (ClassUtils.isAssignable(Document.class, typeHint.getType())) { return (S) documentConverter.convert(this, BsonUtils.asBson(source), typeHint); } @@ -2044,7 +2054,8 @@ public S convert(Object source, TypeInformation return (S) mapConverter.convert(this, BsonUtils.asBson(source), typeHint); } - throw new IllegalArgumentException(String.format("Expected map like structure but found %s", source.getClass())); + throw new IllegalArgumentException( + String.format("Expected map like structure but found %s", source.getClass())); } if (source instanceof DBRef) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java index c63e7a1115..3abd3a3add 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateDocumentReferenceTests.java @@ -733,6 +733,52 @@ void loadNoExistingCollectionReference() { assertThat(result.simplePreinitializedValueRef).isEmpty(); } + @Test // GH-3806 + void resolveReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithRequiredArgsCtor source = new WithRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithRequiredArgsCtor.class); + assertThat(target.publisher).isNotNull(); + } + + @Test // GH-3806 + void resolveLazyReferenceWhenUsedAsCtorArgument() { + + Publisher publisher = new Publisher(); + publisher.id = "p-111"; + publisher.name = "ppp"; + + template.save(publisher); + + WithLazyRequiredArgsCtor source = new WithLazyRequiredArgsCtor("id-1", publisher); + + template.save(source); + + WithLazyRequiredArgsCtor target = template.findOne(query(where("id").is(source.id)), WithLazyRequiredArgsCtor.class); + + // proxy not yet resolved + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + + assertThat(proxy.isResolved()).isFalse(); + assertThat(proxy.currentValue()).isNull(); + }); + + // resolve the proxy by invoking a method on it + assertThat(target.getPublisher().getName()).isEqualTo("ppp"); + LazyLoadingTestUtils.assertProxy(target.publisher, (proxy) -> { + assertThat(proxy.isResolved()).isTrue(); + }); + } + @Test // GH-3602 void queryForReference() { @@ -1371,6 +1417,30 @@ static class Publisher { String id; String acronym; String name; + + public String getId() 
{ + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getAcronym() { + return acronym; + } + + public void setAcronym(String acronym) { + this.acronym = acronym; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } } @Data @@ -1401,4 +1471,40 @@ static class OneToManyStylePublisher { @DocumentReference(lookup="{'publisherId':?#{#self._id} }") List books; } + + static class WithRequiredArgsCtor { + + final String id; + + @DocumentReference + final Publisher publisher; + + public WithRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + } + + static class WithLazyRequiredArgsCtor { + + final String id; + + @DocumentReference(lazy = true) + final Publisher publisher; + + public WithLazyRequiredArgsCtor(String id, Publisher publisher) { + + this.id = id; + this.publisher = publisher; + } + + public String getId() { + return id; + } + + public Publisher getPublisher() { + return publisher; + } + } } From 9014f770d8027c4e1bb35fa91b80d16ac4f6e09e Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 8 Sep 2021 13:33:46 +0200 Subject: [PATCH 129/983] Fix slice argument in query fields projection. We now use a Collection instead of an Array to pass on $slice projection values for offset and limit. Closes: #3811 Original pull request: #3812. 
--- .../data/mongodb/core/query/Field.java | 3 ++- .../data/mongodb/core/MongoTemplateTests.java | 17 +++++++++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java index 0561bbdca6..02450505b6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Field.java @@ -15,6 +15,7 @@ */ package org.springframework.data.mongodb.core.query; +import java.util.Arrays; import java.util.HashMap; import java.util.Map; import java.util.Map.Entry; @@ -192,7 +193,7 @@ public Field slice(String field, int size) { */ public Field slice(String field, int offset, int size) { - slices.put(field, new Integer[] { offset, size }); + slices.put(field, Arrays.asList(offset, size)); return this; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java index 28cdaa4830..33ae0ef994 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateTests.java @@ -3768,6 +3768,23 @@ void shouldFindSubdocumentWithNullCorrectly() { assertThat(loaded).isNotNull(); } + @Test // GH-3811 + public void sliceShouldLimitCollectionValues() { + + DocumentWithCollectionOfSimpleType source = new DocumentWithCollectionOfSimpleType(); + source.id = "id-1"; + source.values = Arrays.asList("spring", "data", "mongodb"); + + template.save(source); + + Criteria criteria = Criteria.where("id").is(source.id); + Query query = Query.query(criteria); + query.fields().slice("values", 0, 1); + 
DocumentWithCollectionOfSimpleType target = template.findOne(query, DocumentWithCollectionOfSimpleType.class); + + assertThat(target.values).containsExactly("spring"); + } + private AtomicReference createAfterSaveReference() { AtomicReference saved = new AtomicReference<>(); From 8fb0e1326b3a33591fca6c7a6ace8fb2088a91ec Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 10:03:45 +0200 Subject: [PATCH 130/983] Introduce `SessionSynchronization.NEVER` to disable transactional participation. SessionSynchronization.NEVER bypasses all transactional integration in cases where applications do not want to make use of transactions so that transaction inspection overhead is avoided. Closes: #3760 Original Pull Request: #3809 --- .../data/mongodb/MongoDatabaseUtils.java | 3 ++- .../mongodb/ReactiveMongoDatabaseUtils.java | 4 ++++ .../data/mongodb/SessionSynchronization.java | 20 +++++++++++++--- .../mongodb/MongoDatabaseUtilsUnitTests.java | 24 +++++++++++++++++++ .../ReactiveMongoDatabaseUtilsUnitTests.java | 14 +++++++++++ 5 files changed, 61 insertions(+), 4 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java index ba8efa536c..c9342ec4f6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -104,7 +104,8 @@ private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDa Assert.notNull(factory, "Factory must not be null!"); - if (!TransactionSynchronizationManager.isSynchronizationActive()) { + if (sessionSynchronization == SessionSynchronization.NEVER + || !TransactionSynchronizationManager.isSynchronizationActive()) { return StringUtils.hasText(dbName) ? 
factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java index 711947a30d..4699ac56c2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -138,6 +138,10 @@ private static Mono doGetMongoDatabase(@Nullable String dbName, R Assert.notNull(factory, "DatabaseFactory must not be null!"); + if (sessionSynchronization == SessionSynchronization.NEVER) { + return getMongoDatabaseOrDefault(dbName, factory); + } + return TransactionSynchronizationManager.forCurrentTransaction() .filter(TransactionSynchronizationManager::isSynchronizationActive) // .flatMap(synchronizationManager -> { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java index 2223b82391..144d3d3cb3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java @@ -15,13 +15,20 @@ */ package org.springframework.data.mongodb; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + /** - * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to - * define in which type of transactions to participate if any. + * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to + * participate if any. 
* * @author Christoph Strobl * @author Mark Paluch * @since 2.1 + * @see MongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) */ public enum SessionSynchronization { @@ -34,5 +41,12 @@ public enum SessionSynchronization { /** * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}. */ - ON_ACTUAL_TRANSACTION; + ON_ACTUAL_TRANSACTION, + + /** + * Do not participate in ongoing transactions. + * + * @since 3.2.5 + */ + NEVER; } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java index 8cb222f0e6..5b0cd81cc2 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/MongoDatabaseUtilsUnitTests.java @@ -109,6 +109,30 @@ void shouldNotStartSessionWhenNoTransactionOngoing() { verify(dbFactory, never()).withSession(any(ClientSession.class)); } + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() throws Exception { + + when(dbFactory.getMongoDatabase()).thenReturn(db); + + JtaTransactionManager txManager = new JtaTransactionManager(userTransaction); + TransactionTemplate txTemplate = new TransactionTemplate(txManager); + + txTemplate.execute(new TransactionCallbackWithoutResult() { + + @Override + protected void doInTransactionWithoutResult(TransactionStatus transactionStatus) { + + MongoDatabaseUtils.getDatabase(dbFactory, SessionSynchronization.NEVER); + + assertThat(TransactionSynchronizationManager.hasResource(dbFactory)).isFalse(); + } + }); 
+ + verify(userTransaction).getStatus(); + verifyNoMoreInteractions(userTransaction); + verifyNoInteractions(session); + } + @Test // DATAMONGO-1920 void shouldParticipateInOngoingJtaTransactionWithCommitWhenSessionSychronizationIsAny() throws Exception { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java index 60a7ff9a47..a7393a1392 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtilsUnitTests.java @@ -88,6 +88,20 @@ void isTransactionActiveShouldLookupTxForActiveTransactionSynchronizationViaTxMa }).as(StepVerifier::create).expectNext(true).verifyComplete(); } + @Test // GH-3760 + void shouldJustReturnDatabaseIfSessionSynchronizationDisabled() { + + when(databaseFactory.getMongoDatabase()).thenReturn(Mono.just(db)); + + ReactiveMongoDatabaseUtils.getDatabase(databaseFactory, SessionSynchronization.NEVER) // + .as(StepVerifier::create) // + .expectNextCount(1) // + .verifyComplete(); + + verify(databaseFactory, never()).getSession(any()); + verify(databaseFactory, never()).withSession(any(ClientSession.class)); + } + @Test // DATAMONGO-2265 void shouldNotStartSessionWhenNoTransactionOngoing() { From a26e78095745ece93ed6711e62f44c1a80ac8a46 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Wed, 8 Sep 2021 10:04:58 +0200 Subject: [PATCH 131/983] Reduce allocations in query and update mapping. Introduce EmptyDocument and utility methods in BsonUtils. Avoid entrySet and iterator creation for document iterations/inspections. 
Relates to: #3760 Original Pull Request: #3809 --- .../data/mongodb/core/MappedDocument.java | 9 ++ .../data/mongodb/core/QueryOperations.java | 2 +- .../core/convert/DocumentAccessor.java | 2 +- .../core/convert/MappingMongoConverter.java | 14 +-- .../mongodb/core/convert/MongoConverter.java | 3 + .../mongodb/core/convert/QueryMapper.java | 53 +++++++---- .../data/mongodb/core/query/Meta.java | 18 +++- .../data/mongodb/core/query/Query.java | 33 +++++-- .../data/mongodb/core/query/TextQuery.java | 13 +-- .../data/mongodb/core/query/Update.java | 30 +++++- .../data/mongodb/util/BsonUtils.java | 63 ++++++++++++ .../data/mongodb/util/EmptyDocument.java | 95 +++++++++++++++++++ .../mongodb/core/MongoTemplateUnitTests.java | 7 +- .../data/mongodb/core/query/QueryTests.java | 16 ++-- .../query/PartTreeMongoQueryUnitTests.java | 7 +- 15 files changed, 298 insertions(+), 67 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java index 340c11bb99..e3c1f3d64c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java @@ -156,5 +156,14 @@ public Boolean isIsolated() { public List getArrayFilters() { return delegate.getArrayFilters(); } + + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters() + */ + @Override + public boolean hasArrayFilters() { + return delegate.hasArrayFilters(); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java index 1ec8fc9366..e9431aa3d2 
100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java @@ -613,7 +613,7 @@ class UpdateContext extends QueryContext { UpdateContext(MappedDocument update, boolean upsert) { - super(new BasicQuery(new Document(BsonUtils.asMap(update.getIdFilter())))); + super(new BasicQuery(BsonUtils.asDocument(update.getIdFilter()))); this.multi = false; this.upsert = upsert; this.mappedDocument = update; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java index 9c94487a3e..0b31f75341 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/DocumentAccessor.java @@ -135,7 +135,7 @@ public Object get(MongoPersistentProperty property) { */ @Nullable public Object getRawId(MongoPersistentEntity entity) { - return entity.hasIdProperty() ? get(entity.getRequiredIdProperty()) : BsonUtils.asMap(document).get("_id"); + return entity.hasIdProperty() ? 
get(entity.getRequiredIdProperty()) : BsonUtils.get(document, "_id"); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java index 07709df365..302c3dad45 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverter.java @@ -25,7 +25,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -1325,21 +1324,22 @@ protected Map readMap(ConversionContext context, Bson bson, Type return map; } - for (Entry entry : sourceMap.entrySet()) { + sourceMap.forEach((k, v) -> { - if (typeMapper.isTypeKey(entry.getKey())) { - continue; + if (typeMapper.isTypeKey(k)) { + return; } - Object key = potentiallyUnescapeMapKey(entry.getKey()); + Object key = potentiallyUnescapeMapKey(k); if (!rawKeyType.isAssignableFrom(key.getClass())) { key = doConvert(key, rawKeyType); } - Object value = entry.getValue(); + Object value = v; map.put(key, value == null ? 
value : context.convert(value, valueType)); - } + + }); return map; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java index 20499d3173..aff1b8d8e0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/MongoConverter.java @@ -140,6 +140,9 @@ default Object convertId(@Nullable Object id, Class targetType) { if (ObjectId.isValid(id.toString())) { return new ObjectId(id.toString()); } + + // avoid ConversionException as convertToMongoType will return String anyways. + return id; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index e7deb38231..356dd89faa 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -193,12 +193,11 @@ public Document getMappedSort(Document sortObject, @Nullable MongoPersistentEnti Assert.notNull(sortObject, "SortObject must not be null!"); if (sortObject.isEmpty()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } Document mappedSort = mapFieldsToPropertyNames(sortObject, entity); - mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); - return mappedSort; + return mapMetaAttributes(mappedSort, entity, MetaMapping.WHEN_PRESENT); } /** @@ -215,42 +214,51 @@ public Document getMappedFields(Document fieldsObject, @Nullable MongoPersistent Assert.notNull(fieldsObject, "FieldsObject must not be null!"); Document mappedFields = mapFieldsToPropertyNames(fieldsObject, entity); - 
mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); - return mappedFields; + return mapMetaAttributes(mappedFields, entity, MetaMapping.FORCE); } private Document mapFieldsToPropertyNames(Document fields, @Nullable MongoPersistentEntity entity) { if (fields.isEmpty()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } Document target = new Document(); - for (Map.Entry entry : BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).entrySet()) { - Field field = createPropertyField(entity, entry.getKey(), mappingContext); + BsonUtils.asMap(filterUnwrappedObjects(fields, entity)).forEach((k, v) -> { + + Field field = createPropertyField(entity, k, mappingContext); if (field.getProperty() != null && field.getProperty().isUnwrapped()) { - continue; + return; } - target.put(field.getMappedKey(), entry.getValue()); - } + target.put(field.getMappedKey(), v); + }); + return target; } - private void mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, MetaMapping metaMapping) { + private Document mapMetaAttributes(Document source, @Nullable MongoPersistentEntity entity, + MetaMapping metaMapping) { if (entity == null) { - return; + return source; } if (entity.hasTextScoreProperty() && !MetaMapping.IGNORE.equals(metaMapping)) { + + if (source == BsonUtils.EMPTY_DOCUMENT) { + source = new Document(); + } + MongoPersistentProperty textScoreProperty = entity.getTextScoreProperty(); if (MetaMapping.FORCE.equals(metaMapping) || (MetaMapping.WHEN_PRESENT.equals(metaMapping) && source.containsKey(textScoreProperty.getFieldName()))) { source.putAll(getMappedTextScoreField(textScoreProperty)); } } + + return source; } private Document filterUnwrappedObjects(Document fieldsObject, @Nullable MongoPersistentEntity entity) { @@ -679,7 +687,7 @@ protected final Entry createMapEntry(Field field, @Nullable Obje private Entry createMapEntry(String key, @Nullable Object value) { Assert.hasText(key, "Key must not be null or empty!"); - return 
Collections.singletonMap(key, value).entrySet().iterator().next(); + return new AbstractMap.SimpleEntry<>(key, value); } private Object createReferenceFor(Object source, MongoPersistentProperty property) { @@ -733,13 +741,13 @@ protected boolean isNestedKeyword(@Nullable Object candidate) { return false; } - Set keys = BsonUtils.asMap((Bson) candidate).keySet(); + Map map = BsonUtils.asMap((Bson) candidate); - if (keys.size() != 1) { + if (map.size() != 1) { return false; } - return isKeyword(keys.iterator().next()); + return isKeyword(map.entrySet().iterator().next().getKey()); } /** @@ -823,11 +831,14 @@ public Keyword(Bson source, String key) { public Keyword(Bson bson) { - Set keys = BsonUtils.asMap(bson).keySet(); - Assert.isTrue(keys.size() == 1, "Can only use a single value Document!"); + Map map = BsonUtils.asMap(bson); + Assert.isTrue(map.size() == 1, "Can only use a single value Document!"); + + Set> entries = map.entrySet(); + Entry entry = entries.iterator().next(); - this.key = keys.iterator().next(); - this.value = BsonUtils.get(bson, key); + this.key = entry.getKey(); + this.value = entry.getValue(); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java index 2bfddfa2cd..d70a21707f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Meta.java @@ -49,8 +49,8 @@ private enum MetaKey { } } - private final Map values = new LinkedHashMap<>(2); - private final Set flags = new LinkedHashSet<>(); + private Map values = Collections.emptyMap(); + private Set flags = Collections.emptySet(); private Integer cursorBatchSize; private Boolean allowDiskUse; @@ -63,8 +63,9 @@ public Meta() {} * @param source */ Meta(Meta source) { - this.values.putAll(source.values); - 
this.flags.addAll(source.flags); + + this.values = new LinkedHashMap<>(source.values); + this.flags = new LinkedHashSet<>(source.flags); this.cursorBatchSize = source.cursorBatchSize; this.allowDiskUse = source.allowDiskUse; } @@ -158,6 +159,11 @@ public void setCursorBatchSize(int cursorBatchSize) { public boolean addFlag(CursorOption option) { Assert.notNull(option, "CursorOption must not be null!"); + + if (this.flags == Collections.EMPTY_SET) { + this.flags = new LinkedHashSet<>(2); + } + return this.flags.add(option); } @@ -220,6 +226,10 @@ void setValue(String key, @Nullable Object value) { Assert.hasText(key, "Meta key must not be 'null' or blank."); + if (values == Collections.EMPTY_MAP) { + values = new LinkedHashMap<>(2); + } + if (value == null || (value instanceof String && !StringUtils.hasText((String) value))) { this.values.remove(key); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java index 1f54e7049d..ce60798bf5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Query.java @@ -21,6 +21,7 @@ import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; @@ -30,6 +31,7 @@ import java.util.concurrent.TimeUnit; import org.bson.Document; + import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Order; @@ -52,7 +54,7 @@ public class Query { private static final String RESTRICTED_TYPES_KEY = "_$RESTRICTED_TYPES"; - private final Set> restrictedTypes = new HashSet<>(); + private Set> restrictedTypes = Collections.emptySet(); private final Map criteria = new LinkedHashMap<>(); 
private @Nullable Field fieldSpec = null; private Sort sort = Sort.unsorted(); @@ -235,8 +237,15 @@ public Query restrict(Class type, Class... additionalTypes) { Assert.notNull(type, "Type must not be null!"); Assert.notNull(additionalTypes, "AdditionalTypes must not be null"); + if (restrictedTypes == Collections.EMPTY_SET) { + restrictedTypes = new HashSet<>(1 + additionalTypes.length); + } + restrictedTypes.add(type); - restrictedTypes.addAll(Arrays.asList(additionalTypes)); + + if (additionalTypes.length > 0) { + restrictedTypes.addAll(Arrays.asList(additionalTypes)); + } return this; } @@ -246,6 +255,17 @@ public Query restrict(Class type, Class... additionalTypes) { */ public Document getQueryObject() { + if (criteria.isEmpty() && restrictedTypes.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + if (criteria.size() == 1 && restrictedTypes.isEmpty()) { + + for (CriteriaDefinition definition : criteria.values()) { + return definition.getCriteriaObject(); + } + } + Document document = new Document(); for (CriteriaDefinition definition : criteria.values()) { @@ -263,7 +283,7 @@ public Document getQueryObject() { * @return the field {@link Document}. */ public Document getFieldsObject() { - return this.fieldSpec == null ? new Document() : fieldSpec.getFieldsObject(); + return this.fieldSpec == null ? BsonUtils.EMPTY_DOCUMENT : fieldSpec.getFieldsObject(); } /** @@ -272,13 +292,12 @@ public Document getFieldsObject() { public Document getSortObject() { if (this.sort.isUnsorted()) { - return new Document(); + return BsonUtils.EMPTY_DOCUMENT; } Document document = new Document(); - this.sort.stream()// - .forEach(order -> document.put(order.getProperty(), order.isAscending() ? 1 : -1)); + this.sort.forEach(order -> document.put(order.getProperty(), order.isAscending() ? 
1 : -1)); return document; } @@ -557,7 +576,7 @@ public boolean isSorted() { target.limit = source.getLimit(); target.hint = source.getHint(); target.collation = source.getCollation(); - target.restrictedTypes.addAll(source.getRestrictedTypes()); + target.restrictedTypes = new HashSet<>(source.getRestrictedTypes()); if (source.getMeta().hasValues()) { target.setMeta(new Meta(source.getMeta())); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java index 9a72b3ffc0..84a5b9d47e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/TextQuery.java @@ -18,6 +18,8 @@ import java.util.Locale; import org.bson.Document; + +import org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; /** @@ -157,7 +159,7 @@ public Document getFieldsObject() { return super.getFieldsObject(); } - Document fields = super.getFieldsObject(); + Document fields = BsonUtils.asMutableDocument(super.getFieldsObject()); fields.put(getScoreFieldName(), META_TEXT_SCORE); return fields; @@ -170,15 +172,14 @@ public Document getFieldsObject() { @Override public Document getSortObject() { - Document sort = new Document(); - if (this.sortByScore) { + Document sort = new Document(); sort.put(getScoreFieldName(), META_TEXT_SCORE); + sort.putAll(super.getSortObject()); + return sort; } - sort.putAll(super.getSortObject()); - - return sort; + return super.getSortObject(); } /* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java index 34cab18c31..bdea768d31 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java 
+++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/query/Update.java @@ -56,10 +56,10 @@ public enum Position { } private boolean isolated = false; - private Set keysToUpdate = new HashSet<>(); - private Map modifierOps = new LinkedHashMap<>(); - private Map pushCommandBuilders = new LinkedHashMap<>(1); - private List arrayFilters = new ArrayList<>(); + private final Set keysToUpdate = new HashSet<>(); + private final Map modifierOps = new LinkedHashMap<>(); + private Map pushCommandBuilders = Collections.emptyMap(); + private List arrayFilters = Collections.emptyList(); /** * Static factory method to create an Update using the provided key @@ -193,6 +193,11 @@ public Update push(String key, @Nullable Object value) { public PushOperatorBuilder push(String key) { if (!pushCommandBuilders.containsKey(key)) { + + if (pushCommandBuilders == Collections.EMPTY_MAP) { + pushCommandBuilders = new LinkedHashMap<>(1); + } + pushCommandBuilders.put(key, new PushOperatorBuilder(key)); } return pushCommandBuilders.get(key); @@ -412,6 +417,10 @@ public Update isolated() { */ public Update filterArray(CriteriaDefinition criteria) { + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new ArrayList<>(); + } + this.arrayFilters.add(criteria::getCriteriaObject); return this; } @@ -427,6 +436,10 @@ public Update filterArray(CriteriaDefinition criteria) { */ public Update filterArray(String identifier, Object expression) { + if (arrayFilters == Collections.EMPTY_LIST) { + this.arrayFilters = new ArrayList<>(); + } + this.arrayFilters.add(() -> new Document(identifier, expression)); return this; } @@ -455,6 +468,15 @@ public List getArrayFilters() { return Collections.unmodifiableList(this.arrayFilters); } + /* + * (non-Javadoc) + * @see org.springframework.data.mongodb.core.query.UpdateDefinition#hasArrayFilters() + */ + @Override + public boolean hasArrayFilters() { + return !this.arrayFilters.isEmpty(); + } + /** * This method is not 
called anymore rather override {@link #addMultiFieldOperation(String, String, Object)}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java index d452ad662f..c540a14603 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/BsonUtils.java @@ -60,12 +60,26 @@ */ public class BsonUtils { + /** + * The empty document (immutable). This document is serializable. + * + * @since 3.2.5 + */ + public static final Document EMPTY_DOCUMENT = new EmptyDocument(); + @SuppressWarnings("unchecked") @Nullable public static T get(Bson bson, String key) { return (T) asMap(bson).get(key); } + /** + * Return the {@link Bson} object as {@link Map}. Depending on the input type, the return value can be either a casted + * version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + */ public static Map asMap(Bson bson) { if (bson instanceof Document) { @@ -81,6 +95,55 @@ public static Map asMap(Bson bson) { return (Map) bson.toBsonDocument(Document.class, MongoClientSettings.getDefaultCodecRegistry()); } + /** + * Return the {@link Bson} object as {@link Document}. Depending on the input type, the return value can be either a + * casted version of {@code bson} or a converted (detached from the original value). + * + * @param bson + * @return + * @since 3.2.5 + */ + public static Document asDocument(Bson bson) { + + if (bson instanceof Document) { + return (Document) bson; + } + + Map map = asMap(bson); + + if (map instanceof Document) { + return (Document) map; + } + + return new Document(map); + } + + /** + * Return the {@link Bson} object as mutable {@link Document} containing all entries from {@link Bson}. 
+ * + * @param bson + * @return a mutable {@link Document} containing all entries from {@link Bson}. + * @since 3.2.5 + */ + public static Document asMutableDocument(Bson bson) { + + if (bson instanceof EmptyDocument) { + bson = new Document(asDocument(bson)); + } + + if (bson instanceof Document) { + return (Document) bson; + } + + Map map = asMap(bson); + + if (map instanceof Document) { + return (Document) map; + } + + return new Document(map); + } + public static void addToMap(Bson bson, String key, @Nullable Object value) { if (bson instanceof Document) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java new file mode 100644 index 0000000000..83c95c82e5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/EmptyDocument.java @@ -0,0 +1,95 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util; + +import java.util.Collection; +import java.util.Collections; +import java.util.Map; +import java.util.Set; +import java.util.function.BiFunction; + +import org.bson.Document; +import org.jetbrains.annotations.Nullable; + +/** + * Empty variant of {@link Document}. 
+ * + * @author Mark Paluch + */ +class EmptyDocument extends Document { + + @Override + public Document append(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Object put(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Object remove(Object key) { + throw new UnsupportedOperationException(); + } + + @Override + public void putAll(Map map) { + throw new UnsupportedOperationException(); + } + + @Override + public void replaceAll(BiFunction function) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean remove(Object key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public boolean replace(String key, Object oldValue, Object newValue) { + throw new UnsupportedOperationException(); + } + + @Nullable + @Override + public Object replace(String key, Object value) { + throw new UnsupportedOperationException(); + } + + @Override + public Set> entrySet() { + return Collections.emptySet(); + } + + @Override + public Collection values() { + return Collections.emptyList(); + } + + @Override + public Set keySet() { + return Collections.emptySet(); + } + + @Override + public void clear() { + throw new UnsupportedOperationException(); + } + +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java index 147d2e49c3..b1d3d6a839 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoTemplateUnitTests.java @@ -101,6 +101,7 @@ import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.mongodb.core.timeseries.Granularity; +import 
org.springframework.data.mongodb.util.BsonUtils; import org.springframework.lang.Nullable; import org.springframework.test.util.ReflectionTestUtils; import org.springframework.util.CollectionUtils; @@ -1071,7 +1072,7 @@ void doesNotApplyFieldsWhenInterfaceProjectionIsOpen() { template.doFind("star-wars", new Document(), new Document(), Person.class, PersonSpELProjection.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @Test // DATAMONGO-1733, DATAMONGO-2041 @@ -1098,7 +1099,7 @@ void doesNotApplyFieldsWhenTargetIsNotAProjection() { template.doFind("star-wars", new Document(), new Document(), Person.class, Person.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @Test // DATAMONGO-1733 @@ -1107,7 +1108,7 @@ void doesNotApplyFieldsWhenTargetExtendsDomainType() { template.doFind("star-wars", new Document(), new Document(), Person.class, PersonExtended.class, CursorPreparer.NO_OP_PREPARER); - verify(findIterable).projection(eq(new Document())); + verify(findIterable).projection(eq(BsonUtils.EMPTY_DOCUMENT)); } @Test // DATAMONGO-1348, DATAMONGO-2264 diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java index 01dddcd084..69da412073 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/query/QueryTests.java @@ -237,11 +237,8 @@ void clonedQueryShouldNotDependOnCriteriaFromSource() { source.addCriteria(where("From one make ten").is("and two let be.")); Query target = Query.of(source); - compareQueries(target, source); - source.addCriteria(where("Make even three").is("then rich 
you'll be.")); - - assertThat(target.getQueryObject()).isEqualTo(new Document("From one make ten", "and two let be.")) - .isNotEqualTo(source.getQueryObject()); + assertThat(target.getQueryObject()).containsAllEntriesOf(new Document("From one make ten", "and two let be.")) + .isNotSameAs(source.getQueryObject()); } @Test // DATAMONGO-1783 @@ -353,9 +350,12 @@ void queryOfShouldWorkOnProxiedObjects() { private void compareQueries(Query actual, Query expected) { assertThat(actual.getCollation()).isEqualTo(expected.getCollation()); - assertThat(actual.getSortObject()).isEqualTo(expected.getSortObject()); - assertThat(actual.getFieldsObject()).isEqualTo(expected.getFieldsObject()); - assertThat(actual.getQueryObject()).isEqualTo(expected.getQueryObject()); + assertThat(actual.getSortObject()).hasSameSizeAs(expected.getSortObject()) + .containsAllEntriesOf(expected.getSortObject()); + assertThat(actual.getFieldsObject()).hasSameSizeAs(expected.getFieldsObject()) + .containsAllEntriesOf(expected.getFieldsObject()); + assertThat(actual.getQueryObject()).hasSameSizeAs(expected.getQueryObject()) + .containsAllEntriesOf(expected.getQueryObject()); assertThat(actual.getHint()).isEqualTo(expected.getHint()); assertThat(actual.getLimit()).isEqualTo(expected.getLimit()); assertThat(actual.getSkip()).isEqualTo(expected.getSkip()); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java index c6c1b140cd..9d8400995a 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/repository/query/PartTreeMongoQueryUnitTests.java @@ -31,11 +31,8 @@ import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.beans.factory.annotation.Value; 
-import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.MongoOperations; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; @@ -128,7 +125,7 @@ void propagatesRootExceptionForInvalidQuery() { @Test // DATAMONGO-1345, DATAMONGO-1735 void doesNotDeriveFieldSpecForNormalDomainType() { - assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEqualTo(new Document()); + assertThat(deriveQueryFromMethod("findPersonBy", new Object[0]).getFieldsObject()).isEmpty(); } @Test // DATAMONGO-1345 @@ -173,7 +170,7 @@ void doesNotCreateFieldsObjectForOpenProjection() { org.springframework.data.mongodb.core.query.Query query = deriveQueryFromMethod("findAllBy"); - assertThat(query.getFieldsObject()).isEqualTo(new Document()); + assertThat(query.getFieldsObject()).isEmpty(); } @Test // DATAMONGO-1865 From d70e459ffe821c4c3b44783ed162d6461732a05c Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 10 Sep 2021 10:48:22 +0200 Subject: [PATCH 132/983] Upgrade to MongoDB Java Drivers 4.3.2 Closes: #3816 --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 4aa47bbf2b..7cb1d10f85 100644 --- a/pom.xml +++ b/pom.xml @@ -27,7 +27,7 @@ multi spring-data-mongodb 2.6.0-SNAPSHOT - 4.3.1 + 4.3.2 ${mongo} 1.19 From 7d6b5ae5fee0ec48dc07f2abce3ab6d342076635 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 10 Sep 2021 15:37:59 +0200 Subject: [PATCH 133/983] Upgrade to Maven Wrapper 3.8.2. 
See #3818 --- .mvn/wrapper/maven-wrapper.properties | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties index 00d32aab1d..39700a5c4b 100755 --- a/.mvn/wrapper/maven-wrapper.properties +++ b/.mvn/wrapper/maven-wrapper.properties @@ -1 +1,2 @@ -distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.5.4/apache-maven-3.5.4-bin.zip \ No newline at end of file +#Fri Sep 10 15:37:59 CEST 2021 +distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.8.2/apache-maven-3.8.2-bin.zip From e7150f525ed39ef565c56c45d9c1d965ce728c96 Mon Sep 17 00:00:00 2001 From: divyajnu08 Date: Fri, 10 Sep 2021 10:37:30 +0530 Subject: [PATCH 134/983] Fix update mapping using nested integer keys on map structures. Closes: #3775 Original Pull Request: #3815 --- .../mongodb/core/convert/QueryMapper.java | 5 ++-- .../core/convert/QueryMapperUnitTests.java | 25 +++++++++++++++++++ 2 files changed, 28 insertions(+), 2 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java index 356dd89faa..df53f2c21c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/convert/QueryMapper.java @@ -70,6 +70,7 @@ * @author Christoph Strobl * @author Mark Paluch * @author David Julia + * @author Divya Srivastava */ public class QueryMapper { @@ -1032,8 +1033,8 @@ public TypeInformation getTypeHint() { */ protected static class MetadataBackedField extends Field { - private static final Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?|\\.\\d+"); - private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+"); + private static final 
Pattern POSITIONAL_PARAMETER_PATTERN = Pattern.compile("\\.\\$(\\[.*?\\])?"); + private static final Pattern DOT_POSITIONAL_PATTERN = Pattern.compile("\\.\\d+(?!$)"); private static final String INVALID_ASSOCIATION_REFERENCE = "Invalid path reference %s! Associations can only be pointed to directly or via their id property!"; private final MongoPersistentEntity entity; diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index 46db6e7d6a..a54e80fa39 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -61,6 +61,7 @@ import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.TextQuery; +import org.springframework.data.mongodb.core.query.Update; import com.mongodb.BasicDBObject; import com.mongodb.MongoClientSettings; @@ -1354,6 +1355,25 @@ void mapStringIdFieldProjection() { org.bson.Document mappedFields = mapper.getMappedFields(new org.bson.Document("id", 1), context.getPersistentEntity(WithStringId.class)); assertThat(mappedFields).containsEntry("_id", 1); } + + @Test + void mapNestedStringFieldCorrectly() { + Update update = new Update(); + update.set("levelOne.a.b.d", "e"); + org.bson.Document document = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + assertThat(document).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.a.b.d","e"))); + } + + @Test + void mapNestedIntegerFieldCorrectly() { + Update update = new Update(); + update.set("levelOne.0.1.3", "4"); + org.bson.Document document = 
mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + assertThat(document).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.3","4"))); + } + @Test // GH-3783 void retainsId$InWithStringArray() { @@ -1542,6 +1562,11 @@ static class EntityWithIntKeyedMapOfMap{ static class EntityWithComplexValueTypeList { List list; } + + static class EntityWithNestedMap { + Map>> levelOne; + } + static class WithExplicitTargetTypes { From eda1c793157b8883441a43c7bcc629926a7e7206 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 13 Sep 2021 14:25:17 +0200 Subject: [PATCH 135/983] Move and add tests to UpdateMapper. Also update author information. Original Pull Request: #3815 --- .../core/convert/QueryMapperUnitTests.java | 24 -------- .../core/convert/UpdateMapperUnitTests.java | 55 +++++++++++++++++++ 2 files changed, 55 insertions(+), 24 deletions(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java index a54e80fa39..11ea78fd4d 100755 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/QueryMapperUnitTests.java @@ -1355,25 +1355,6 @@ void mapStringIdFieldProjection() { org.bson.Document mappedFields = mapper.getMappedFields(new org.bson.Document("id", 1), context.getPersistentEntity(WithStringId.class)); assertThat(mappedFields).containsEntry("_id", 1); } - - @Test - void mapNestedStringFieldCorrectly() { - Update update = new Update(); - update.set("levelOne.a.b.d", "e"); - org.bson.Document document = mapper.getMappedObject(update.getUpdateObject(), - context.getPersistentEntity(EntityWithNestedMap.class)); - assertThat(document).isEqualTo(new org.bson.Document("$set",new 
org.bson.Document("levelOne.a.b.d","e"))); - } - - @Test - void mapNestedIntegerFieldCorrectly() { - Update update = new Update(); - update.set("levelOne.0.1.3", "4"); - org.bson.Document document = mapper.getMappedObject(update.getUpdateObject(), - context.getPersistentEntity(EntityWithNestedMap.class)); - assertThat(document).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.3","4"))); - } - @Test // GH-3783 void retainsId$InWithStringArray() { @@ -1562,11 +1543,6 @@ static class EntityWithIntKeyedMapOfMap{ static class EntityWithComplexValueTypeList { List list; } - - static class EntityWithNestedMap { - Map>> levelOne; - } - static class WithExplicitTargetTypes { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java index bba9811e56..44712fa8d1 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/convert/UpdateMapperUnitTests.java @@ -68,6 +68,7 @@ * @author Mark Paluch * @author Pavel Vodrazka * @author David Julia + * @author Divya Srivastava */ @ExtendWith(MockitoExtension.class) class UpdateMapperUnitTests { @@ -1200,6 +1201,56 @@ void mapsObjectClassPropertyFieldInMapValueTypeAsKey() { assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"map.class\": \"value\"}}"); } + @Test // GH-3775 + void mapNestedStringFieldCorrectly() { + + Update update = new Update().set("levelOne.a.b.d", "e"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.a.b.d","e"))); + } + + @Test // GH-3775 + void mapNestedIntegerFieldCorrectly() { + + Update update = new 
Update().set("levelOne.0.1.3", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.3","4"))); + } + + @Test // GH-3775 + void mapNestedMixedStringIntegerFieldCorrectly() { + + Update update = new Update().set("levelOne.0.1.c", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0.1.c","4"))); + } + + @Test // GH-3775 + void mapNestedMixedStringIntegerWithStartNumberFieldCorrectly() { + + Update update = new Update().set("levelOne.0a.1b.3c", "4"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithNestedMap.class)); + + assertThat(mappedUpdate).isEqualTo(new org.bson.Document("$set",new org.bson.Document("levelOne.0a.1b.3c","4"))); + } + + @Test // GH-3688 + void multipleKeysStartingWithANumberInNestedPath() { + + Update update = new Update().set("intKeyedMap.1a.map.0b", "testing"); + Document mappedUpdate = mapper.getMappedObject(update.getUpdateObject(), + context.getPersistentEntity(EntityWithIntKeyedMap.class)); + + assertThat(mappedUpdate).isEqualTo("{\"$set\": {\"intKeyedMap.1a.map.0b\": \"testing\"}}"); + } + static class DomainTypeWrappingConcreteyTypeHavingListOfInterfaceTypeAttributes { ListModelWrapper concreteTypeWithListAttributeOfInterfaceType; } @@ -1566,4 +1617,8 @@ static class UnwrappableType { String transientValue; } + static class EntityWithNestedMap { + Map>> levelOne; + } + } From 99203b397a27f9cd595eebb028161b9c054dfd68 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 24 Aug 2021 07:06:17 +0200 Subject: [PATCH 136/983] Add support for deriving json schema for encrypted properties. 
This commit introduces support for creating a MongoJsonSchema containing encrypted fields for a given type based on mapping metadata. Using the Encrypted annotation allows to derive required encryptMetadata and encrypt properties within a given (mapping)context. @Document @Encrypted(keyId = "...") static class Patient { // ... @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") private Integer ssn; } MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext); MongoJsonSchema patientSchema = schemaCreator .filter(MongoJsonSchemaCreator.encryptedOnly()) .createSchemaFor(Patient.class); Closes: #3800 Original pull request: #3801. --- .../mongodb/core/EncryptionAlgorithms.java | 29 ++ .../core/MappingMongoJsonSchemaCreator.java | 109 ++++++- .../mongodb/core/MongoJsonSchemaCreator.java | 136 +++++++++ .../mapping/BasicMongoPersistentEntity.java | 38 +++ .../mapping/BasicMongoPersistentProperty.java | 48 ++++ .../data/mongodb/core/mapping/Encrypted.java | 112 ++++++++ .../core/mapping/MongoMappingContext.java | 6 + .../core/mapping/MongoPersistentEntity.java | 9 + .../core/mapping/MongoPersistentProperty.java | 9 + .../UnwrappedMongoPersistentEntity.java | 6 + .../UnwrappedMongoPersistentProperty.java | 6 + .../core/schema/DefaultMongoJsonSchema.java | 36 ++- .../core/schema/DocumentJsonSchema.java | 6 +- .../IdentifiableJsonSchemaProperty.java | 16 +- .../mongodb/core/schema/MongoJsonSchema.java | 32 ++- .../core/schema/TypedJsonSchemaObject.java | 4 + .../util/encryption/EncryptionUtils.java | 67 +++++ .../mongodb/util/spel/ExpressionUtils.java | 52 ++++ ...appingMongoJsonSchemaCreatorUnitTests.java | 272 +++++++++++++++++- .../asciidoc/reference/mongo-json-schema.adoc | 103 +++++++ 20 files changed, 1074 insertions(+), 22 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java create mode 100644 
spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java new file mode 100644 index 0000000000..0ed7340aa1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java @@ -0,0 +1,29 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Encryption algorithms supported by MongoDB Client Side Field Level Encryption. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public final class EncryptionAlgorithms { + + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"; + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random"; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java index ecbf8a4f07..a53ff8f5a5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -20,13 +20,19 @@ import java.util.Collections; import java.util.EnumSet; import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import org.bson.Document; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.Encrypted; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; import org.springframework.data.mongodb.core.schema.JsonSchemaObject; import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; @@ -34,10 +40,12 @@ import 
org.springframework.data.mongodb.core.schema.MongoJsonSchema; import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain @@ -52,6 +60,7 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { private final MongoConverter converter; private final MappingContext, MongoPersistentProperty> mappingContext; + private final Predicate filter; /** * Create a new instance of {@link MappingMongoJsonSchemaCreator}. @@ -61,10 +70,24 @@ class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { @SuppressWarnings("unchecked") MappingMongoJsonSchemaCreator(MongoConverter converter) { + this(converter, (MappingContext, MongoPersistentProperty>) converter.getMappingContext(), + (property) -> true); + } + + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter, + MappingContext, MongoPersistentProperty> mappingContext, + Predicate filter) { + Assert.notNull(converter, "Converter must not be null!"); this.converter = converter; - this.mappingContext = (MappingContext, MongoPersistentProperty>) converter - .getMappingContext(); + this.mappingContext = mappingContext; + this.filter = filter; + } + + @Override + public MongoJsonSchemaCreator filter(Predicate filter) { + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter); } /* @@ -77,11 +100,29 @@ public MongoJsonSchema createSchemaFor(Class type) { MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); MongoJsonSchemaBuilder 
schemaBuilder = MongoJsonSchema.builder(); + { + Encrypted encrypted = entity.findAnnotation(Encrypted.class); + if (encrypted != null) { + + Document encryptionMetadata = new Document(); + + Collection encryptionKeyIds = entity.getEncryptionKeyIds(); + if (!CollectionUtils.isEmpty(encryptionKeyIds)) { + encryptionMetadata.append("keyId", encryptionKeyIds); + } + + if (StringUtils.hasText(encrypted.algorithm())) { + encryptionMetadata.append("algorithm", encrypted.algorithm()); + } + + schemaBuilder.encryptionMetadata(encryptionMetadata); + } + } + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); return schemaBuilder.build(); - } private List computePropertiesForEntity(List path, @@ -93,6 +134,11 @@ private List computePropertiesForEntity(List currentPath = new ArrayList<>(path); + if (!filter.test(new PropertyContext( + currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")), nested))) { + continue; + } + if (path.contains(nested)) { // cycle guard schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)), Object.class, false)); @@ -120,15 +166,38 @@ private JsonSchemaProperty computeSchemaForProperty(List path, @@ -207,4 +276,30 @@ static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProp return JsonSchemaProperty.required(property); } + + class PropertyContext implements JsonSchemaPropertyContext { + + private String path; + private MongoPersistentProperty property; + + public PropertyContext(String path, MongoPersistentProperty property) { + this.path = path; + this.property = property; + } + + @Override + public String getPath() { + return path; + } + + @Override + public MongoPersistentProperty getProperty() { + return property; + } + + @Override + public MongoPersistentEntity resolveEntity(MongoPersistentProperty property) { + return 
(MongoPersistentEntity) mappingContext.getPersistentEntity(property); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java index f3c0dcd624..5e5bc50644 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -15,7 +15,23 @@ */ package org.springframework.data.mongodb.core; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; import org.springframework.util.Assert; @@ -46,6 +62,7 @@ * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. *

        + * {@link Encrypted} properties will contain {@literal encrypt} information. * * @author Christoph Strobl * @since 2.2 @@ -60,6 +77,88 @@ public interface MongoJsonSchemaCreator { */ MongoJsonSchema createSchemaFor(Class type); + /** + * Filter matching {@link JsonSchemaProperty properties}. + * + * @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + MongoJsonSchemaCreator filter(Predicate filter); + + /** + * The context in which a specific {@link #getProperty()} is encountered during schema creation. + * + * @since 3.3 + */ + interface JsonSchemaPropertyContext { + + /** + * The path to a given field/property in dot notation. + * + * @return never {@literal null}. + */ + String getPath(); + + /** + * The current property. + * + * @return never {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Obtain the {@link MongoPersistentEntity} for a given property. + * + * @param property must not be {@literal null}. + * @param + * @return {@literal null} if the property is not an entity. It is nevertheless recommend to check + * {@link PersistentProperty#isEntity()} first. + */ + @Nullable + MongoPersistentEntity resolveEntity(MongoPersistentProperty property); + + } + + /** + * A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones. + * + * @return new instance of {@link Predicate}. 
+ * @since 3.3 + */ + static Predicate encryptedOnly() { + + return new Predicate() { + + // cycle guard + private final Set seen = new HashSet<>(); + + @Override + public boolean test(JsonSchemaPropertyContext context) { + return extracted(context.getProperty(), context); + } + + private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) { + if (property.isAnnotationPresent(Encrypted.class)) { + return true; + } + + if (!property.isEntity() || seen.contains(property)) { + return false; + } + + seen.add(property); + + for (MongoPersistentProperty nested : context.resolveEntity(property)) { + if (extracted(nested, context)) { + return true; + } + } + return false; + } + }; + } + /** * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given * {@link MongoConverter}. @@ -72,4 +171,41 @@ static MongoJsonSchemaCreator create(MongoConverter mongoConverter) { Assert.notNull(mongoConverter, "MongoConverter must not be null!"); return new MappingMongoJsonSchemaCreator(mongoConverter); } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential + * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}. + * + * @param mappingContext must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create(MappingContext mappingContext) { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We + * recommend to use {@link #create(MappingContext)}. + * + * @return new instance of {@link MongoJsonSchemaCreator}. 
+ * @since 3.3 + */ + static MongoJsonSchemaCreator create() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java index 7bf8214aeb..6840fce5bf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentEntity.java @@ -17,8 +17,12 @@ import java.lang.reflect.Field; import java.lang.reflect.Modifier; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.springframework.data.annotation.Id; @@ -28,6 +32,9 @@ import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.model.BasicPersistentEntity; import org.springframework.data.mongodb.MongoCollectionUtils; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.spel.ExpressionDependencies; +import org.springframework.data.util.Lazy; import org.springframework.data.util.TypeInformation; import org.springframework.expression.EvaluationContext; import org.springframework.expression.Expression; @@ -212,6 +219,11 @@ public EvaluationContext getEvaluationContext(Object rootObject) { return 
super.getEvaluationContext(rootObject); } + @Override + public EvaluationContext getEvaluationContext(Object rootObject, ExpressionDependencies dependencies) { + return super.getEvaluationContext(rootObject, dependencies); + } + private void verifyFieldUniqueness() { AssertFieldNameUniquenessHandler handler = new AssertFieldNameUniquenessHandler(); @@ -360,6 +372,32 @@ private void assertUniqueness(MongoPersistentProperty property) { } } + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getType().getSimpleName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } + /** * @author Christoph Strobl * @since 1.6 diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java index 1315757896..cf74d696a8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/BasicMongoPersistentProperty.java @@ -16,7 +16,11 @@ package org.springframework.data.mongodb.core.mapping; import java.math.BigInteger; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; import org.bson.types.ObjectId; @@ -29,7 +33,12 @@ import org.springframework.data.mapping.model.Property; import 
org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; import org.springframework.data.mapping.model.SimpleTypeHolder; +import org.springframework.data.mongodb.util.encryption.EncryptionUtils; +import org.springframework.data.util.Lazy; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.spel.support.StandardEvaluationContext; import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; /** @@ -299,4 +308,43 @@ public boolean isTextScoreProperty() { return isAnnotationPresent(TextScore.class); } + /** + * Obtain the {@link EvaluationContext} for a specific root object. + * + * @param rootObject can be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + public EvaluationContext getEvaluationContext(@Nullable Object rootObject) { + + if (getOwner() instanceof BasicMongoPersistentEntity) { + return ((BasicMongoPersistentEntity) getOwner()).getEvaluationContext(rootObject); + } + return rootObject != null ? new StandardEvaluationContext(rootObject) : new StandardEvaluationContext(); + } + + @Override + public Collection getEncryptionKeyIds() { + + Encrypted encrypted = findAnnotation(Encrypted.class); + if (encrypted == null) { + return null; + } + + if (ObjectUtils.isEmpty(encrypted.keyId())) { + return Collections.emptySet(); + } + + Lazy evaluationContext = Lazy.of(() -> { + EvaluationContext ctx = getEvaluationContext(null); + ctx.setVariable("target", getOwner().getType().getSimpleName() + "." 
+ getName()); + return ctx; + }); + + List target = new ArrayList<>(); + for (String keyId : encrypted.keyId()) { + target.add(EncryptionUtils.resolveKeyId(keyId, evaluationContext)); + } + return target; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java new file mode 100644 index 0000000000..8bd0f99c41 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/Encrypted.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.mapping; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * {@link Encrypted} provides data required for MongoDB Client Side Field Level Encryption that is applied during schema + * resolution. It can be applied on top level (typically those types annotated with {@link Document} to provide the + * {@literal encryptMetadata}. + * + *
        + * @Document
        + * @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==")
        + * public class Patient {
        + * 	 private ObjectId id;
        + * 	 private String name;
        + *
        + * 	 @Field("publisher_ac")
        + * 	 @DocumentReference(lookup = "{ 'acronym' : ?#{#target} }") private Publisher publisher;
        + * }
        + *
        + * "encryptMetadata": {
        + *    "keyId": [
        + *      {
        + *        "$binary": {
        + *          "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
        + *          "subType": "04"
        + *        }
        + *      }
        + *    ]
        + *  }
        + * 
        + * + *
        + * On property level it is used for deriving field specific {@literal encrypt} settings. + * + *
        + * public class Patient {
        + * 	 private ObjectId id;
        + * 	 private String name;
        + *
        + * 	 @Encrypted(keyId = "4fPYFM9qSgyRAjgQ2u+IMQ==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic")
        + * 	 private String ssn;
        + * }
        + *
        + * "ssn" : {
        + *   "encrypt": {
        + *      "keyId": [
        + *        {
        + *          "$binary": {
        + *            "base64": "4fPYFM9qSgyRAjgQ2u+IMQ==",
        + *            "subType": "04"
        + *          }
        + *        }
        + *      ],
        + *      "algorithm" : "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic",
        + *      "bsonType" : "string"
        + *    }
        + *  }
        + * 
        + * + * @author Christoph Strobl + * @since 3.3 + */ +@Documented +@Retention(RetentionPolicy.RUNTIME) +@Target({ ElementType.TYPE, ElementType.FIELD }) +public @interface Encrypted { + + /** + * Get the {@code keyId} to use. The value must resolve to either the UUID representation of the key or a base64 + * encoded value representing the UUID value. + *

        + * On {@link ElementType#TYPE} level the {@link #keyId()} can be left empty if explicitly set for fields.
        + * On {@link ElementType#FIELD} level the {@link #keyId()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the key id to use. May contain a parsable {@link org.springframework.expression.Expression expression}. In + * this case the {@code #target} variable will hold the target element name. + */ + String[] keyId() default {}; + + /** + * Set the algorithm to use. + *

        + * On {@link ElementType#TYPE} level the {@link #algorithm()} can be left empty if explicitly set for fields.
        + * On {@link ElementType#FIELD} level the {@link #algorithm()} can be left empty if inherited from + * {@literal encryptMetadata}. + * + * @return the encryption algorithm. + * @see org.springframework.data.mongodb.core.EncryptionAlgorithms + */ + String algorithm() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java index 121658b065..674ea74f3a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoMappingContext.java @@ -46,6 +46,9 @@ public class MongoMappingContext extends AbstractMappingContext BasicMongoPersistentEntity createPersistentEntity(TypeInformati */ @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { + + this.applicationContext = applicationContext; super.setApplicationContext(applicationContext); } @@ -145,4 +150,5 @@ public MongoPersistentEntity getPersistentEntity(MongoPersistentProperty pers return new UnwrappedMongoPersistentEntity<>(entity, new UnwrapEntityContext(persistentProperty)); } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java index deb69eab36..d9b5ae0bd4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentEntity.java @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Collection; + import org.springframework.data.mapping.PersistentEntity; import 
org.springframework.data.mapping.model.MutablePersistentEntity; import org.springframework.lang.Nullable; @@ -102,4 +104,11 @@ default boolean isUnwrapped() { return false; } + /** + * @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified. + * {@literal null} no {@link Encrypted} annotation found. + * @since 3.3 + */ + @Nullable + Collection getEncryptionKeyIds(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java index 2bd387d74c..8dc89e03f9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/MongoPersistentProperty.java @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core.mapping; +import java.util.Collection; + import org.springframework.core.convert.converter.Converter; import org.springframework.data.annotation.Id; import org.springframework.data.mapping.PersistentEntity; @@ -160,6 +162,13 @@ default boolean isUnwrapped() { return isEntity() && isAnnotationPresent(Unwrapped.class); } + /** + * @return the resolved encryption keyIds if applicable. An empty {@link Collection} if no keyIds specified. + * {@literal null} no {@link Encrypted} annotation found. + * @since 3.3 + */ + Collection getEncryptionKeyIds(); + /** * Simple {@link Converter} implementation to transform a {@link MongoPersistentProperty} into its field name. 
* diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java index 6a60168e91..f85c73cae0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentEntity.java @@ -17,6 +17,7 @@ import java.lang.annotation.Annotation; import java.util.ArrayList; +import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Spliterator; @@ -323,4 +324,9 @@ public void setEvaluationContextProvider(EvaluationContextProvider provider) { public boolean isUnwrapped() { return context.getProperty().isUnwrapped(); } + + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java index a2194c173f..24e4ae057f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/mapping/UnwrappedMongoPersistentProperty.java @@ -18,6 +18,7 @@ import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.util.Collection; import org.springframework.data.mapping.Association; import org.springframework.data.mapping.PersistentEntity; @@ -268,6 +269,11 @@ public boolean isUnwrapped() { return delegate.isUnwrapped(); } + @Override + public Collection getEncryptionKeyIds() { + return delegate.getEncryptionKeyIds(); + } + 
@Override @Nullable public Class getComponentType() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java index 1b05840913..f77e4290ec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DefaultMongoJsonSchema.java @@ -16,7 +16,9 @@ package org.springframework.data.mongodb.core.schema; import org.bson.Document; +import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; /** * Value object representing a MongoDB-specific JSON schema which is the default {@link MongoJsonSchema} implementation. @@ -29,18 +31,44 @@ class DefaultMongoJsonSchema implements MongoJsonSchema { private final JsonSchemaObject root; + @Nullable // + private final Document encryptionMetadata; + DefaultMongoJsonSchema(JsonSchemaObject root) { + this(root, null); + } + + /** + * Create new instance of {@link DefaultMongoJsonSchema}. + * + * @param root the schema root element. + * @param encryptionMetadata can be {@literal null}. 
+ * @since 3.3 + */ + DefaultMongoJsonSchema(JsonSchemaObject root, @Nullable Document encryptionMetadata) { + + Assert.notNull(root, "Root schema object must not be null!"); - Assert.notNull(root, "Root must not be null!"); this.root = root; + this.encryptionMetadata = encryptionMetadata; } /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#toDocument() + * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#schema() */ @Override - public Document toDocument() { - return new Document("$jsonSchema", root.toDocument()); + public Document schemaDocument() { + + Document schemaDocument = new Document(); + + // we want this to be the first element rendered, so it reads nice when printed to json + if (!CollectionUtils.isEmpty(encryptionMetadata)) { + schemaDocument.append("encryptMetadata", encryptionMetadata); + } + + schemaDocument.putAll(root.toDocument()); + + return schemaDocument; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java index 2788dd59e5..787e94903a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/DocumentJsonSchema.java @@ -36,10 +36,10 @@ class DocumentJsonSchema implements MongoJsonSchema { /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#toDocument() + * @see org.springframework.data.mongodb.core.schema.MongoJsonSchema#schema() */ @Override - public Document toDocument() { - return new Document("$jsonSchema", new Document(document)); + public Document schemaDocument() { + return new Document(document); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java index 28116e1bac..97b3cc6b46 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/IdentifiableJsonSchemaProperty.java @@ -523,6 +523,10 @@ public ObjectJsonSchemaProperty description(String description) { public ObjectJsonSchemaProperty generatedDescription() { return new ObjectJsonSchemaProperty(identifier, jsonSchemaObjectDelegate.generatedDescription()); } + + public List getProperties() { + return jsonSchemaObjectDelegate.getProperties(); + } } /** @@ -1060,7 +1064,7 @@ public static class EncryptedJsonSchemaProperty implements JsonSchemaProperty { private final JsonSchemaProperty targetProperty; private final @Nullable String algorithm; private final @Nullable String keyId; - private final @Nullable List keyIds; + private final @Nullable List keyIds; /** * Create new instance of {@link EncryptedJsonSchemaProperty} wrapping the given {@link JsonSchemaProperty target}. @@ -1072,7 +1076,7 @@ public EncryptedJsonSchemaProperty(JsonSchemaProperty target) { } private EncryptedJsonSchemaProperty(JsonSchemaProperty target, @Nullable String algorithm, @Nullable String keyId, - @Nullable List keyIds) { + @Nullable List keyIds) { Assert.notNull(target, "Target must not be null!"); this.targetProperty = target; @@ -1134,6 +1138,14 @@ public EncryptedJsonSchemaProperty keys(UUID... keyId) { return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); } + /** + * @param keyId must not be {@literal null}. + * @return new instance of {@link EncryptedJsonSchemaProperty}. + */ + public EncryptedJsonSchemaProperty keys(Object... 
keyId) { + return new EncryptedJsonSchemaProperty(targetProperty, algorithm, null, Arrays.asList(keyId)); + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java index d45dcd09ec..a14cde2d3a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/MongoJsonSchema.java @@ -20,6 +20,7 @@ import org.bson.Document; import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject.ObjectJsonSchemaObject; +import org.springframework.lang.Nullable; /** * Interface defining MongoDB-specific JSON schema object. New objects can be built with {@link #builder()}, for @@ -62,13 +63,25 @@ public interface MongoJsonSchema { /** - * Create the {@link Document} containing the specified {@code $jsonSchema}.
        + * Create the {@code $jsonSchema} {@link Document} containing the specified {@link #schemaDocument()}.
        * Property and field names need to be mapped to the domain type ones by running the {@link Document} through a * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. * * @return never {@literal null}. */ - Document toDocument(); + default Document toDocument() { + return new Document("$jsonSchema", schemaDocument()); + } + + /** + * Create the {@link Document} defining the schema.
        + * Property and field names need to be mapped to the domain type ones by running the {@link Document} through a + * {@link org.springframework.data.mongodb.core.convert.JsonSchemaMapper} to apply field name customization. + * + * @return never {@literal null}. + * @since 3.3 + */ + Document schemaDocument(); /** * Create a new {@link MongoJsonSchema} for a given root object. @@ -108,6 +121,9 @@ class MongoJsonSchemaBuilder { private ObjectJsonSchemaObject root; + @Nullable // + private Document encryptionMetadata; + MongoJsonSchemaBuilder() { root = new ObjectJsonSchemaObject(); } @@ -266,13 +282,23 @@ public MongoJsonSchemaBuilder description(String description) { return this; } + /** + * Define the {@literal encryptMetadata} element of the schema. + * + * @param encryptionMetadata can be {@literal null}. + * @since 3.3 + */ + public void encryptionMetadata(@Nullable Document encryptionMetadata) { + this.encryptionMetadata = encryptionMetadata; + } + /** * Obtain the {@link MongoJsonSchema}. * * @return new instance of {@link MongoJsonSchema}. 
*/ public MongoJsonSchema build() { - return MongoJsonSchema.of(root); + return new DefaultMongoJsonSchema(root, encryptionMetadata); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java index 2486e98e08..59a367a9d7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/schema/TypedJsonSchemaObject.java @@ -437,6 +437,10 @@ public ObjectJsonSchemaObject generatedDescription() { return newInstance(description, true, restrictions); } + public List getProperties() { + return properties; + } + /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.schema.JsonSchemaObject#toDocument() diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java new file mode 100644 index 0000000000..809f83fdc9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/encryption/EncryptionUtils.java @@ -0,0 +1,67 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.util.encryption; + +import java.util.UUID; +import java.util.function.Supplier; + +import org.springframework.data.mongodb.util.spel.ExpressionUtils; +import org.springframework.expression.EvaluationContext; +import org.springframework.expression.Expression; +import org.springframework.lang.Nullable; +import org.springframework.util.Assert; + +/** + * Internal utility class for dealing with encryption related matters. + * + * @author Christoph Strobl + * @since 3.3 + */ +public final class EncryptionUtils { + + /** + * Resolve a given plain {@link String} value into the store native {@literal keyId} format, considering potential + * {@link Expression expressions}.
        + * The potential keyId is probed against an {@link UUID#fromString(String) UUID value} and the {@literal base64} + * encoded {@code $binary} representation. + * + * @param value the source value to resolve the keyId for. Must not be {@literal null}. + * @param evaluationContext a {@link Supplier} used to provide the {@link EvaluationContext} in case an + * {@link Expression} is {@link ExpressionUtils#detectExpression(String) detected}. + * @return can be {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. + */ + @Nullable + public static Object resolveKeyId(String value, Supplier evaluationContext) { + + Assert.notNull(value, "Value must not be null!"); + + Object potentialKeyId = value; + Expression expression = ExpressionUtils.detectExpression(value); + if (expression != null) { + potentialKeyId = expression.getValue(evaluationContext.get()); + if (!(potentialKeyId instanceof String)) { + return potentialKeyId; + } + } + try { + return UUID.fromString(potentialKeyId.toString()); + } catch (IllegalArgumentException e) { + return org.bson.Document.parse("{ val : { $binary : { base64 : '" + potentialKeyId + "', subType : '04'} } }") + .get("val"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java new file mode 100644 index 0000000000..b41961e6ea --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/util/spel/ExpressionUtils.java @@ -0,0 +1,52 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.util.spel; + +import org.springframework.expression.Expression; +import org.springframework.expression.ParserContext; +import org.springframework.expression.common.LiteralExpression; +import org.springframework.expression.spel.standard.SpelExpressionParser; +import org.springframework.lang.Nullable; +import org.springframework.util.StringUtils; + +/** + * Internal utility class for dealing with {@link Expression} and potential ones. + * + * @author Christoph Strobl + * @since 3.3 + */ +public final class ExpressionUtils { + + private static final SpelExpressionParser PARSER = new SpelExpressionParser(); + + /** + * Returns a SpEL {@link Expression} if the given {@link String} is actually an expression that does not evaluate to a + * {@link LiteralExpression} (indicating that no subsequent evaluation is necessary). + * + * @param potentialExpression can be {@literal null} + * @return can be {@literal null}. + */ + @Nullable + public static Expression detectExpression(@Nullable String potentialExpression) { + + if (!StringUtils.hasText(potentialExpression)) { + return null; + } + + Expression expression = PARSER.parseExpression(potentialExpression, ParserContext.TEMPLATE_EXPRESSION); + return expression instanceof LiteralExpression ? 
null : expression; + } +} diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java index 9c52bbe628..9fd19189ce 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreatorUnitTests.java @@ -19,23 +19,27 @@ import java.util.Collections; import java.util.Date; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.bson.Document; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; - +import org.springframework.context.support.GenericApplicationContext; import org.springframework.data.annotation.Transient; import org.springframework.data.convert.WritingConverter; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.FieldType; import org.springframework.data.mongodb.core.mapping.MongoId; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.spel.spi.EvaluationContextExtension; +import org.springframework.data.spel.spi.Function; /** * Unit tests for {@link MappingMongoJsonSchemaCreator}. 
@@ -95,6 +99,64 @@ public void converterRegistered() { "{ 'type' : 'object', 'properties' : { '_id' : { 'type' : 'object' }, 'nested' : { 'type' : 'object' } } }"); } + @Test // GH-3800 + public void csfle/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Patient.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema).isEqualTo(Document.parse(PATIENT)); + } + + @Test // GH-3800 + public void csfleCyclic/*encryptedFieldsOnly*/() { + + MongoJsonSchema schema = MongoJsonSchemaCreator.create() // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // filter non encrypted fields + .createSchemaFor(Cyclic.class); + + Document targetSchema = schema.schemaDocument(); + assertThat(targetSchema).isNotNull(); + } + + @Test // GH-3800 + public void csfleWithKeyFromProperties() { + + GenericApplicationContext applicationContext = new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromProperty.class); + + assertThat(schema.schemaDocument()).isEqualTo(Document.parse(ENC_FROM_PROPERTY_SCHEMA)); + } + + @Test // GH-3800 + public void csfleWithKeyFromMethod() { + + GenericApplicationContext applicationContext = new GenericApplicationContext(); + applicationContext.registerBean("encryptionExtension", EncryptionExtension.class, () -> new EncryptionExtension()); + applicationContext.refresh(); + + MongoMappingContext mappingContext = new MongoMappingContext(); 
+ mappingContext.setApplicationContext(applicationContext); + mappingContext.afterPropertiesSet(); + + MongoJsonSchema schema = MongoJsonSchemaCreator.create(mappingContext) // + .filter(MongoJsonSchemaCreator.encryptedOnly()) // + .createSchemaFor(EncryptionMetadataFromMethod.class); + + assertThat(schema.schemaDocument()).isEqualTo(Document.parse(ENC_FROM_METHOD_SCHEMA)); + } + // --> TYPES AND JSON // --> ENUM @@ -125,8 +187,7 @@ enum JustSomeEnum { " 'collectionProperty' : { 'type' : 'array' }," + // " 'mapProperty' : { 'type' : 'object' }," + // " 'objectProperty' : { 'type' : 'object' }," + // - " 'enumProperty' : " + JUST_SOME_ENUM + // - " }" + // + " 'enumProperty' : " + JUST_SOME_ENUM + " }" + // "}"; static class VariousFieldTypes { @@ -249,4 +310,209 @@ public org.bson.Document convert(VariousFieldTypes source) { } } + static final String PATIENT = "{" + // + " 'type': 'object'," + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': 'xKVup8B1Q+CkHaVRx+qa+g=='," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'properties': {" + // + " 'ssn': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }," + // + " 'bloodType': {" + // + " 'encrypt': {" + // + " 'bsonType': 'string'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'medicalRecords': {" + // + " 'encrypt': {" + // + " 'bsonType': 'array'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Random'" + // + " }" + // + " }," + // + " 'insurance': {" + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==") + 
static class Patient { + String name; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer ssn; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + String bloodType; + + String keyAltNameField; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") // + List> medicalRecords; + + Insurance insurance; + } + + static class Insurance { + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_PROPERTY_ENTITY_KEY = "C5a5aMB7Ttq4wSJTFeRn8g=="; + static final String ENC_FROM_PROPERTY_PROPOERTY_KEY = "Mw6mdTVPQfm4quqSCLVB3g="; + static final String ENC_FROM_PROPERTY_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_PROPERTY_PROPOERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{entityKey}") + static class EncryptionMetadataFromProperty { + + @Encrypted(keyId = "#{propertyKey}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + static final String ENC_FROM_METHOD_ENTITY_KEY = "4fPYFM9qSgyRAjgQ2u+IMQ=="; + static final String ENC_FROM_METHOD_PROPOERTY_KEY = "+idiseKwTVCJfSKC3iUeYQ=="; + static final String ENC_FROM_METHOD_SCHEMA = "{" + // + " 'encryptMetadata': {" + // + " 'keyId': [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + 
ENC_FROM_METHOD_ENTITY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " }," + // + " 'type': 'object'," + // + " 'properties': {" + // + " 'policyNumber': {" + // + " 'encrypt': {" + // + " 'keyId': [" + // + " [" + // + " {" + // + " '$binary': {" + // + " 'base64': '" + ENC_FROM_METHOD_PROPOERTY_KEY + "'," + // + " 'subType': '04'" + // + " }" + // + " }" + // + " ]" + // + " ]," + // + " 'bsonType': 'int'," + // + " 'algorithm': 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'" + // + " }" + // + " }" + // + " }" + // + "}"; + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}") + static class EncryptionMetadataFromMethod { + + @Encrypted(keyId = "#{mongocrypt.keyId(#target)}", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") // + Integer policyNumber; + + String provider; + } + + public static class EncryptionExtension implements EvaluationContextExtension { + + /* + * (non-Javadoc) + * @see org.springframework.data.spel.spi.EvaluationContextExtension#getExtensionId() + */ + @Override + public String getExtensionId() { + return "mongocrypt"; + } + + /* + * (non-Javadoc) + * @see org.springframework.data.spel.spi.EvaluationContextExtension#getProperties() + */ + @Override + public Map getProperties() { + + Map properties = new LinkedHashMap<>(); + properties.put("entityKey", ENC_FROM_PROPERTY_ENTITY_KEY); + properties.put("propertyKey", ENC_FROM_PROPERTY_PROPOERTY_KEY); + return properties; + } + + @Override + public Map getFunctions() { + try { + return Collections.singletonMap("keyId", + new Function(EncryptionExtension.class.getMethod("keyId", String.class), this)); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + } + return Collections.emptyMap(); + } + + public String keyId(String target) { + + if (target.equals("EncryptionMetadataFromMethod")) { + return ENC_FROM_METHOD_ENTITY_KEY; + } + + if (target.equals("EncryptionMetadataFromMethod.policyNumber")) { + return ENC_FROM_METHOD_PROPOERTY_KEY; + } + + 
return "xKVup8B1Q+CkHaVRx+qa+g=="; + } + } } diff --git a/src/main/asciidoc/reference/mongo-json-schema.adoc b/src/main/asciidoc/reference/mongo-json-schema.adoc index 5a426061a2..36c85f6fb5 100644 --- a/src/main/asciidoc/reference/mongo-json-schema.adoc +++ b/src/main/asciidoc/reference/mongo-json-schema.adoc @@ -225,6 +225,109 @@ MongoJsonSchema schema = MongoJsonSchema.builder() ---- ==== +Instead of defining encrypted fields manually it is possible leverage the `@Encrypted` annotation as shown in the snippet below. + +.Client-Side Field Level Encryption via Json Schema +==== +[source,java] +---- +@Document +@Encrypted(keyId = "xKVup8B1Q+CkHaVRx+qa+g==", algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") <1> +static class Patient { + + @Id String id; + String name; + + @Encrypted <2> + String bloodType; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") <3> + Integer ssn; +} +---- +<1> Default encryption settings that will be set for `encryptMetadata`. +<2> Encrypted field using default encryption settings. +<3> Encrypted field overriding the default encryption algorithm. +==== + +[TIP] +==== +The `@Encrypted` Annoation supports resolving keyIds via SpEL Expressions. +To do so additional environment metadata (via the `MappingContext`) is required and must be provided. + +[source,java] +---- +@Document +@Encrypted(keyId = "#{mongocrypt.keyId(#target)}") +static class Patient { + + @Id String id; + String name; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Random") + String bloodType; + + @Encrypted(algorithm = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") + Integer ssn; +} + +MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext); +MongoJsonSchema patientSchema = schemaCreator + .filter(MongoJsonSchemaCreator.encryptedOnly()) + .createSchemaFor(Patient.class); +---- + +The `mongocrypt.keyId` function is defined via an `EvaluationContextExtension` as shown in the snippet below. 
+Providing a custom extension provides the most flexible way of computing keyIds. + +[source,java] +---- +public class EncryptionExtension implements EvaluationContextExtension { + + @Override + public String getExtensionId() { + return "mongocrypt"; + } + + @Override + public Map getFunctions() { + return Collections.singletonMap("keyId", new Function(getMethod("computeKeyId", String.class), this)); + } + + public String computeKeyId(String target) { + // ... lookup via target element name + } +} +---- + +To combine derived encryption settings with `AutoEncryptionSettings` in a Spring Boot application use the `MongoClientSettingsBuilderCustomizer`. + +[source,java] +---- +@Bean +MongoClientSettingsBuilderCustomizer customizer(MappingContext mappingContext) { + return (builder) -> { + + // ... keyVaultCollection, kmsProvider, ... + + MongoJsonSchemaCreator schemaCreator = MongoJsonSchemaCreator.create(mappingContext); + MongoJsonSchema patientSchema = schemaCreator + .filter(MongoJsonSchemaCreator.encryptedOnly()) + .createSchemaFor(Patient.class); + + AutoEncryptionSettings autoEncryptionSettings = AutoEncryptionSettings.builder() + .keyVaultNamespace(keyVaultCollection) + .kmsProviders(kmsProviders) + .extraOptions(extraOpts) + .schemaMap(Collections.singletonMap("db.patient", patientSchema.schemaDocument().toBsonDocument())) + .build(); + + builder.autoEncryptionSettings(autoEncryptionSettings); + }; +} +---- +==== + NOTE: Make sure to set the drivers `com.mongodb.AutoEncryptionSettings` to use client-side encryption. MongoDB does not support encryption for all field types. Specific data types require deterministic encryption to preserve equality comparison functionality. [[mongo.jsonSchema.types]] From 9b02897db54cbd3b9edca34d4f48c2846d903499 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Mon, 13 Sep 2021 11:12:12 +0200 Subject: [PATCH 137/983] Add configuration support for MongoDB ServerApiVersion. 
Introduce FactoryBean and required options to set the ServerAPI. Update namespace xsd and parsing. Closes: #3820 Original pull request: #3821. --- .../mongodb/config/MongoParsingUtils.java | 17 + .../core/MongoClientSettingsFactoryBean.java | 17 +- .../core/MongoServerApiFactoryBean.java | 92 ++ .../main/resources/META-INF/spring.schemas | 6 +- .../data/mongodb/config/spring-mongo-3.3.xsd | 895 ++++++++++++++++++ .../config/MongoClientNamespaceTests.java | 13 + .../core/MongoServerApiFactoryBeanTests.java | 73 ++ .../MongoClientNamespaceTests-context.xml | 5 + 8 files changed, 1114 insertions(+), 4 deletions(-) create mode 100644 spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java create mode 100644 spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd create mode 100644 spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java index cd4d16d91b..935be95500 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java @@ -22,9 +22,12 @@ import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.CustomEditorConfigurer; import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionValidationException; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.xml.BeanDefinitionParser; import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean; +import 
org.springframework.data.mongodb.core.MongoServerApiFactoryBean; +import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; @@ -112,6 +115,20 @@ public static boolean parseMongoClientSettings(Element element, BeanDefinitionBu // Field level encryption setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings"); + // ServerAPI + if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) { + + MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean(); + serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version")); + try { + clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject()); + } catch (Exception exception) { + throw new BeanDefinitionValidationException("Non parsable server-api.", exception); + } + } else { + setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi"); + } + // and the rest mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java index 162035a45d..818dd45f3f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java @@ -36,6 +36,7 @@ import com.mongodb.ReadConcern; import com.mongodb.ReadPreference; import com.mongodb.ServerAddress; +import com.mongodb.ServerApi; import com.mongodb.WriteConcern; import com.mongodb.connection.ClusterConnectionMode; import com.mongodb.connection.ClusterType; @@ -113,6 +114,7 @@ public class MongoClientSettingsFactoryBean 
extends AbstractFactoryBean getObjectType() { return MongoClientSettings.class; @@ -476,9 +487,11 @@ protected MongoClientSettings createInstance() { if (retryWrites != null) { builder = builder.retryWrites(retryWrites); } - if (uUidRepresentation != null) { - builder.uuidRepresentation(uUidRepresentation); + builder = builder.uuidRepresentation(uUidRepresentation); + } + if (serverApi != null) { + builder = builder.serverApi(serverApi); } return builder.build(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java new file mode 100644 index 0000000000..e2a2fecaec --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.lang.Nullable; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApi.Builder; +import com.mongodb.ServerApiVersion; + +/** + * {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoServerApiFactoryBean implements FactoryBean { + + private String version; + private @Nullable Boolean deprecationErrors; + private @Nullable Boolean strict; + + /** + * @param version the version string either as the enum name or the server version value. + * @see ServerApiVersion + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * @param deprecationErrors + * @see ServerApi.Builder#deprecationErrors(boolean) + */ + public void setDeprecationErrors(@Nullable Boolean deprecationErrors) { + this.deprecationErrors = deprecationErrors; + } + + /** + * @param strict + * @see ServerApi.Builder#strict(boolean) + */ + public void setStrict(@Nullable Boolean strict) { + this.strict = strict; + } + + @Nullable + @Override + public ServerApi getObject() throws Exception { + + Builder builder = ServerApi.builder().version(version()); + + if (deprecationErrors != null) { + builder = builder.deprecationErrors(deprecationErrors); + } + if (strict != null) { + builder = builder.strict(strict); + } + return builder.build(); + } + + @Nullable + @Override + public Class getObjectType() { + return ServerApi.class; + } + + private ServerApiVersion version() { + try { + // lookup by name eg. 'V1' + return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version); + } catch (IllegalArgumentException e) { + // or just the version number, eg. 
just '1' + return ServerApiVersion.findByValue(version); + } + } +} diff --git a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas index 1ebb3098c7..c7f3f0ab7b 100644 --- a/spring-data-mongodb/src/main/resources/META-INF/spring.schemas +++ b/spring-data-mongodb/src/main/resources/META-INF/spring.schemas @@ -11,7 +11,8 @@ http\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/sp http\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd http\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd -http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd +http\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-1.0.xsd=org/springframework/data/mongodb/config/spring-mongo-1.0.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-1.1.xsd=org/springframework/data/mongodb/config/spring-mongo-1.1.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-1.2.xsd=org/springframework/data/mongodb/config/spring-mongo-1.2.xsd @@ -25,4 +26,5 @@ https\://www.springframework.org/schema/data/mongo/spring-mongo-1.10.2.xsd=org/s https\://www.springframework.org/schema/data/mongo/spring-mongo-2.0.xsd=org/springframework/data/mongodb/config/spring-mongo-2.0.xsd 
https\://www.springframework.org/schema/data/mongo/spring-mongo-2.2.xsd=org/springframework/data/mongodb/config/spring-mongo-2.2.xsd https\://www.springframework.org/schema/data/mongo/spring-mongo-3.0.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd -https\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.0.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo-3.3.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd +https\://www.springframework.org/schema/data/mongo/spring-mongo.xsd=org/springframework/data/mongodb/config/spring-mongo-3.3.xsd diff --git a/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd new file mode 100644 index 0000000000..80811306f1 --- /dev/null +++ b/spring-data-mongodb/src/main/resources/org/springframework/data/mongodb/config/spring-mongo-3.3.xsd @@ -0,0 +1,895 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + The reference to a MongoTemplate. Will default to 'mongoTemplate'. + + + + + + + Enables creation of indexes for queries that get derived from the method name + and thus reference domain class properties. Defaults to false. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The reference to a MongoTypeMapper to be used by this MappingMongoConverter. + + + + + + + The reference to a MappingContext. Will default to 'mappingContext'. + + + + + + + Disables JSR-303 validation on MongoDB documents before they are saved. By default it is set to false. 
+ + + + + + + + + + Enables abbreviating the field names for domain class properties to the + first character of their camel case names, e.g. fooBar -> fb. Defaults to false. + + + + + + + + + + The reference to a FieldNamingStrategy. + + + + + + + Enable/Disable index creation for annotated properties/entities. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A reference to a custom converter. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. + + + + + + + + + + + + The WriteConcern that will be the default value used when asking the MongoDatabaseFactory for a DB object + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + The reference to a MongoDatabaseFactory. 
+ + + + + + + + + + + + + + + + diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java index 47dd85e07a..abdd00c2b5 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java @@ -21,6 +21,7 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; +import com.mongodb.ServerApiVersion; import org.bson.UuidRepresentation; import org.junit.Test; import org.junit.runner.RunWith; @@ -147,4 +148,16 @@ public void clientWithUUidSettings() { MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); assertThat(settings.getUuidRepresentation()).isEqualTo(UuidRepresentation.STANDARD); } + + @Test // DATAMONGO-2427 + public void clientWithServerVersion() { + + assertThat(ctx.containsBean("client-with-server-api-settings")).isTrue(); + MongoClientFactoryBean factoryBean = ctx.getBean("&client-with-server-api-settings", MongoClientFactoryBean.class); + + MongoClientSettings settings = (MongoClientSettings) getField(factoryBean, "mongoClientSettings"); + assertThat(settings.getServerApi()).isNotNull().satisfies(it -> { + assertThat(it.getVersion()).isEqualTo(ServerApiVersion.V1); + }); + } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java new file mode 100644 index 0000000000..0c79478fee --- /dev/null +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import static org.assertj.core.api.Assertions.*; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApiVersion; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; +import org.springframework.beans.factory.support.RootBeanDefinition; +import org.springframework.test.util.ReflectionTestUtils; + +import com.mongodb.AutoEncryptionSettings; + +/** + * Integration tests for {@link MongoServerApiFactoryBean}. 
+ * + * @author Christoph Strobl + */ +public class MongoServerApiFactoryBeanTests { + + @Test // DATAMONGO-2306 + public void createsServerApiForVersionString() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "V1"); + definition.getPropertyValues().addPropertyValue("deprecationErrors", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "deprecationErrors")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).contains(true); + assertThat(target.getStrict()).isNotPresent(); + } + + @Test // DATAMONGO-2306 + public void createsServerApiForVersionNumber() { + + RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); + definition.getPropertyValues().addPropertyValue("version", "1"); + definition.getPropertyValues().addPropertyValue("strict", "true"); + + DefaultListableBeanFactory factory = new DefaultListableBeanFactory(); + factory.registerBeanDefinition("factory", definition); + + MongoServerApiFactoryBean bean = factory.getBean("&factory", MongoServerApiFactoryBean.class); + assertThat(ReflectionTestUtils.getField(bean, "strict")).isEqualTo(true); + + ServerApi target = factory.getBean(ServerApi.class); + assertThat(target.getVersion()).isEqualTo(ServerApiVersion.V1); + assertThat(target.getDeprecationErrors()).isNotPresent(); + assertThat(target.getStrict()).contains(true); + } +} diff --git a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml 
b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml index 1bd3aa2a05..79e5ac40a0 100644 --- a/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml +++ b/spring-data-mongodb/src/test/resources/org/springframework/data/mongodb/config/MongoClientNamespaceTests-context.xml @@ -41,4 +41,9 @@ + + + + + From 0af8d6839e965b04717aa17f483a70af13c23a52 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 14 Sep 2021 09:11:32 +0200 Subject: [PATCH 138/983] Polishing. Reformat code, fix ticket references in tests. See #3820 Original pull request: #3821. --- .../mongodb/core/MongoServerApiFactoryBean.java | 2 +- .../config/MongoClientNamespaceTests.java | 5 +++-- .../core/MongoServerApiFactoryBeanTests.java | 16 ++++++++-------- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java index e2a2fecaec..c93016b097 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -35,7 +35,7 @@ public class MongoServerApiFactoryBean implements FactoryBean { private @Nullable Boolean deprecationErrors; private @Nullable Boolean strict; - /** + /** * @param version the version string either as the enum name or the server version value. 
* @see ServerApiVersion */ diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java index abdd00c2b5..127e3d1022 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/MongoClientNamespaceTests.java @@ -21,10 +21,10 @@ import java.util.Collections; import java.util.concurrent.TimeUnit; -import com.mongodb.ServerApiVersion; import org.bson.UuidRepresentation; import org.junit.Test; import org.junit.runner.RunWith; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationContext; import org.springframework.data.mongodb.core.MongoClientFactoryBean; @@ -35,6 +35,7 @@ import com.mongodb.MongoClientSettings; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; +import com.mongodb.ServerApiVersion; import com.mongodb.connection.ClusterType; /** @@ -149,7 +150,7 @@ public void clientWithUUidSettings() { assertThat(settings.getUuidRepresentation()).isEqualTo(UuidRepresentation.STANDARD); } - @Test // DATAMONGO-2427 + @Test // GH-3820 public void clientWithServerVersion() { assertThat(ctx.containsBean("client-with-server-api-settings")).isTrue(); diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java index 0c79478fee..d584b6cfb3 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBeanTests.java @@ -17,24 +17,24 @@ import static org.assertj.core.api.Assertions.*; 
-import com.mongodb.ServerApi; -import com.mongodb.ServerApiVersion; import org.junit.jupiter.api.Test; + import org.springframework.beans.factory.support.DefaultListableBeanFactory; import org.springframework.beans.factory.support.RootBeanDefinition; import org.springframework.test.util.ReflectionTestUtils; -import com.mongodb.AutoEncryptionSettings; +import com.mongodb.ServerApi; +import com.mongodb.ServerApiVersion; /** * Integration tests for {@link MongoServerApiFactoryBean}. * * @author Christoph Strobl */ -public class MongoServerApiFactoryBeanTests { +class MongoServerApiFactoryBeanTests { - @Test // DATAMONGO-2306 - public void createsServerApiForVersionString() { + @Test // GH-3820 + void createsServerApiForVersionString() { RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); definition.getPropertyValues().addPropertyValue("version", "V1"); @@ -52,8 +52,8 @@ public void createsServerApiForVersionString() { assertThat(target.getStrict()).isNotPresent(); } - @Test // DATAMONGO-2306 - public void createsServerApiForVersionNumber() { + @Test // GH-3820 + void createsServerApiForVersionNumber() { RootBeanDefinition definition = new RootBeanDefinition(MongoServerApiFactoryBean.class); definition.getPropertyValues().addPropertyValue("version", "1"); From 8f00ffd29158ff2294a08f8e4ec3c5e8ab45e9ab Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Wed, 15 Sep 2021 15:30:10 +0200 Subject: [PATCH 139/983] Change visibility of PersistentEntitiesFactoryBean. 
Closes: #3825 --- .../data/mongodb/config/PersistentEntitiesFactoryBean.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java index ba382a32cc..29d606c4de 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java @@ -28,7 +28,7 @@ * @author Christoph Strobl * @since 3.1 */ -class PersistentEntitiesFactoryBean implements FactoryBean { +public class PersistentEntitiesFactoryBean implements FactoryBean { private final MappingMongoConverter converter; From 38e1d0d92deb9a2d0009abf335e67b6fef74f491 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:44:34 +0200 Subject: [PATCH 140/983] Prepare 3.3 M3 (2021.1.0). See #3771 --- pom.xml | 8 ++++---- src/main/resources/notice.txt | 3 ++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index 7cb1d10f85..e1ecd121e4 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-SNAPSHOT + 2.6.0-M3 @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-SNAPSHOT + 2.6.0-M3 4.3.2 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-libs-milestone + https://repo.spring.io/libs-milestone sonatype-libs-snapshot diff --git a/src/main/resources/notice.txt b/src/main/resources/notice.txt index 29628c3570..ceef18ae5b 100644 --- a/src/main/resources/notice.txt +++ b/src/main/resources/notice.txt @@ -1,4 +1,4 @@ -Spring Data MongoDB 3.3 M2 (2021.1.0) +Spring Data MongoDB 3.3 M3 (2021.1.0) Copyright (c) [2010-2019] Pivotal Software, Inc. 
This product is licensed to you under the Apache License, Version 2.0 (the "License"). @@ -27,5 +27,6 @@ conditions of the subcomponent's license, as noted in the LICENSE file. + From 00350edd3265b3d81c1551e7ffb90fa1a72c9fd6 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:44:56 +0200 Subject: [PATCH 141/983] Release version 3.3 M3 (2021.1.0). See #3771 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index e1ecd121e4..b63985f7c6 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index 0033bd11d5..ac1428bdf5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f62c8dc7f4..f3e3c3d92e 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 2f73c10eba..6a959b228d 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-SNAPSHOT + 3.3.0-M3 ../pom.xml From 715ae26f3ccb3842f25abdd698295e365aa1b898 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:52:18 +0200 Subject: [PATCH 142/983] Prepare next development iteration. 
See #3771 --- pom.xml | 2 +- spring-data-mongodb-benchmarks/pom.xml | 2 +- spring-data-mongodb-distribution/pom.xml | 2 +- spring-data-mongodb/pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index b63985f7c6..e1ecd121e4 100644 --- a/pom.xml +++ b/pom.xml @@ -5,7 +5,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT pom Spring Data MongoDB diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml index ac1428bdf5..0033bd11d5 100644 --- a/spring-data-mongodb-benchmarks/pom.xml +++ b/spring-data-mongodb-benchmarks/pom.xml @@ -7,7 +7,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index f3e3c3d92e..f62c8dc7f4 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -14,7 +14,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT ../pom.xml diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index 6a959b228d..2f73c10eba 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -11,7 +11,7 @@ org.springframework.data spring-data-mongodb-parent - 3.3.0-M3 + 3.3.0-SNAPSHOT ../pom.xml From b7ffff47694ba4066eadc8a7359696766b53bc8e Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Fri, 17 Sep 2021 09:52:21 +0200 Subject: [PATCH 143/983] After release cleanups. 
See #3771 --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index e1ecd121e4..7cb1d10f85 100644 --- a/pom.xml +++ b/pom.xml @@ -15,7 +15,7 @@ org.springframework.data.build spring-data-parent - 2.6.0-M3 + 2.6.0-SNAPSHOT @@ -26,7 +26,7 @@ multi spring-data-mongodb - 2.6.0-M3 + 2.6.0-SNAPSHOT 4.3.2 ${mongo} 1.19 @@ -134,8 +134,8 @@ - spring-libs-milestone - https://repo.spring.io/libs-milestone + spring-libs-snapshot + https://repo.spring.io/libs-snapshot sonatype-libs-snapshot From 63d9875576beddd0651c6d7c777f54829dbb6aa1 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 6 Jul 2021 07:54:31 +0200 Subject: [PATCH 144/983] Update test for MongoDB Server 5.0. Update assertions for changed return types, add a bit of think time and disable tests for no longer supported features. See #3696 Original pull request: #3753. --- .../config/AbstractIntegrationTests.java | 15 ++++++------- .../core/geo/GeoSpatialIndexTests.java | 8 ++++--- .../core/index/IndexingIntegrationTests.java | 6 ++++- .../DefaultMessageListenerContainerTests.java | 22 ++++++++++++------- .../mongodb/test/util/MongoTestUtils.java | 17 ++++++++++++++ 5 files changed, 48 insertions(+), 20 deletions(-) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java index 701e2eb986..00a4e9d935 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/config/AbstractIntegrationTests.java @@ -21,10 +21,9 @@ import java.util.Set; import org.bson.Document; -import org.junit.After; -import org.junit.Before; -import org.junit.runner.RunWith; - +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import 
org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; import org.springframework.dao.DataAccessException; @@ -32,7 +31,7 @@ import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringRunner; +import org.springframework.test.context.junit.jupiter.SpringExtension; import com.mongodb.MongoException; import com.mongodb.client.MongoClient; @@ -41,7 +40,7 @@ /** * @author Oliver Gierke */ -@RunWith(SpringRunner.class) +@ExtendWith(SpringExtension.class) @ContextConfiguration public abstract class AbstractIntegrationTests { @@ -71,8 +70,8 @@ protected boolean autoIndexCreation() { @Autowired MongoOperations operations; - @Before - @After + @BeforeEach + @AfterEach public void cleanUp() { for (String collectionName : operations.getCollectionNames()) { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java index 449c78f225..10984e0a1c 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/geo/GeoSpatialIndexTests.java @@ -21,8 +21,8 @@ import java.util.List; import java.util.Map; -import org.junit.Before; -import org.junit.Test; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.data.geo.Point; @@ -35,6 +35,7 @@ import org.springframework.data.mongodb.core.index.IndexInfo; import 
org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.Document; +import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; import com.mongodb.MongoException; import com.mongodb.WriteConcern; @@ -52,7 +53,7 @@ public class GeoSpatialIndexTests extends AbstractIntegrationTests { @Autowired private MongoTemplate template; - @Before + @BeforeEach public void setUp() { template.setWriteConcern(WriteConcern.JOURNALED); @@ -82,6 +83,7 @@ public void test2dSphereIndex() { } @Test // DATAMONGO-778 + @EnableIfMongoServerVersion(isLessThan = "5.0") public void testHaystackIndex() { try { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java index 2c61b0fdbf..7c731a37c4 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/index/IndexingIntegrationTests.java @@ -155,7 +155,11 @@ public void evaluatesTimeoutSpelExpresssionWithBeanReference() { }); assertThat(indexInfo).isPresent(); - assertThat(indexInfo.get()).containsEntry("expireAfterSeconds", 11L); + assertThat(indexInfo.get()).hasEntrySatisfying("expireAfterSeconds", timeout -> { + + // MongoDB 5 returns int not long + assertThat(timeout).isIn(11, 11L); + }); } @Target({ ElementType.FIELD }) diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java index b973de0cf1..c47918c565 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java +++ 
b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/messaging/DefaultMessageListenerContainerTests.java @@ -38,13 +38,16 @@ import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.messaging.ChangeStreamRequest.ChangeStreamRequestOptions; import org.springframework.data.mongodb.core.messaging.SubscriptionRequest.RequestOptions; +import org.springframework.data.mongodb.test.util.Client; import org.springframework.data.mongodb.test.util.EnableIfMongoServerVersion; import org.springframework.data.mongodb.test.util.EnableIfReplicaSetAvailable; import org.springframework.data.mongodb.test.util.MongoServerCondition; import org.springframework.data.mongodb.test.util.MongoTemplateExtension; +import org.springframework.data.mongodb.test.util.MongoTestUtils; import org.springframework.data.mongodb.test.util.Template; import org.springframework.util.ErrorHandler; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.model.CreateCollectionOptions; import com.mongodb.client.model.changestream.ChangeStreamDocument; @@ -60,9 +63,12 @@ public class DefaultMessageListenerContainerTests { static final String DATABASE_NAME = "change-stream-events"; static final String COLLECTION_NAME = "collection-1"; static final String COLLECTION_2_NAME = "collection-2"; + static final String COLLECTION_3_NAME = "collection-3"; static final Duration TIMEOUT = Duration.ofSeconds(2); + @Client static MongoClient client; + @Template(database = DATABASE_NAME, initialEntitySet = Person.class) // static MongoTemplate template; @@ -74,10 +80,13 @@ public class DefaultMessageListenerContainerTests { private CollectingMessageListener messageListener; @BeforeEach - void beforeEach() { + void beforeEach() throws InterruptedException { + + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_NAME, client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_2_NAME, 
client); + MongoTestUtils.dropCollectionNow(DATABASE_NAME, COLLECTION_3_NAME, client); - template.dropCollection(COLLECTION_NAME); - template.dropCollection(COLLECTION_2_NAME); + Thread.sleep(100); messageListener = new CollectingMessageListener<>(); } @@ -281,7 +290,7 @@ public void abortsSubscriptionOnError() throws InterruptedException { @Test // DATAMONGO-1803 public void callsDefaultErrorHandlerOnError() throws InterruptedException { - dbFactory.getMongoDatabase().createCollection(COLLECTION_NAME, + dbFactory.getMongoDatabase().createCollection(COLLECTION_3_NAME, new CreateCollectionOptions().capped(true).maxDocuments(10000).sizeInBytes(10000)); collection.insertOne(new Document("_id", "id-1").append("value", "foo")); @@ -298,10 +307,7 @@ public void callsDefaultErrorHandlerOnError() throws InterruptedException { Document.class); SubscriptionUtils.awaitSubscription(subscription); - - template.dropCollection(COLLECTION_NAME); - - Thread.sleep(20); + dbFactory.getMongoDatabase().drop(); verify(errorHandler, atLeast(1)).handleError(any(DataAccessException.class)); } finally { diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java index 5a41e8a68c..7e29d2a272 100644 --- a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java +++ b/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/test/util/MongoTestUtils.java @@ -166,6 +166,23 @@ public static void dropCollectionNow(String dbName, String collectionName, .verifyComplete(); } + /** + * Create a {@link com.mongodb.client.MongoCollection} if it does not exist, or drop and recreate it if it does and + * verify operation result. + * + * @param dbName must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param client must not be {@literal null}. 
+ */ + public static void dropCollectionNow(String dbName, String collectionName, + com.mongodb.client.MongoClient client) { + + com.mongodb.client.MongoDatabase database = client.getDatabase(dbName) + .withWriteConcern(WriteConcern.MAJORITY).withReadPreference(ReadPreference.primary()); + + database.getCollection(collectionName).drop(); + } + /** * Remove all documents from the {@link MongoCollection} with given name in the according {@link MongoDatabase * database}. From 2f208d712ca2353c45529132ba88e3e9b4c339eb Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 6 Jul 2021 09:28:32 +0200 Subject: [PATCH 145/983] Update CI to cover MongoDB Server 5.0. MongoDB has alpha releases in a slightly different location on their distribution server. And they use different keys for signing these alpha releases compared to the overall package listing. Closes #3696 Original pull request: #3753. --- Jenkinsfile | 40 ++++++++++++++++++++++++++++++ ci/openjdk8-mongodb-5.0/Dockerfile | 17 +++++++++++++ 2 files changed, 57 insertions(+) create mode 100644 ci/openjdk8-mongodb-5.0/Dockerfile diff --git a/Jenkinsfile b/Jenkinsfile index 1eb84755a5..1ee5ed5c5f 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -14,6 +14,22 @@ pipeline { stages { stage("Docker images") { parallel { + stage('Publish JDK 8 + MongoDB 5.0') { + when { + changeset "ci/openjdk8-mongodb-5.0/**" + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } + + steps { + script { + def image = docker.build("springci/spring-data-openjdk8-with-mongodb-5.0.0", "ci/openjdk8-mongodb-5.0/") + docker.withRegistry('', 'hub.docker.com-springbuildmaster') { + image.push() + } + } + } + } stage('Publish JDK 8 + MongoDB 4.0') { when { changeset "ci/openjdk8-mongodb-4.0/**" @@ -151,6 +167,30 @@ pipeline { } } + stage("test: mongodb 5.0 (jdk8)") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') + 
} + steps { + script { + docker.withRegistry('', 'hub.docker.com-springbuildmaster') { + docker.image('springci/spring-data-openjdk8-with-mongodb-5.0.0:latest').inside('-v $HOME:/tmp/jenkins-home') { + sh 'mkdir -p /tmp/mongodb/db /tmp/mongodb/log' + sh 'mongod --setParameter transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log &' + sh 'sleep 10' + sh 'mongo --eval "rs.initiate({_id: \'rs0\', members:[{_id: 0, host: \'127.0.0.1:27017\'}]});"' + sh 'sleep 15' + sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml clean dependency:list test -Duser.name=jenkins -Dsort -U -B' + } + } + } + } + } + stage("test: baseline (jdk16)") { agent { label 'data' diff --git a/ci/openjdk8-mongodb-5.0/Dockerfile b/ci/openjdk8-mongodb-5.0/Dockerfile new file mode 100644 index 0000000000..658b615bbd --- /dev/null +++ b/ci/openjdk8-mongodb-5.0/Dockerfile @@ -0,0 +1,17 @@ +FROM adoptopenjdk/openjdk8:latest + +ENV TZ=Etc/UTC +ENV DEBIAN_FRONTEND=noninteractive + +RUN set -eux; \ + apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \ + # MongoDB 5.0 release signing key + apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \ + # Needed when MongoDB creates a 5.0 folder. + echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/5.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-5.0.list; \ + echo ${TZ} > /etc/timezone; + +RUN apt-get update; \ + apt-get install -y mongodb-org=5.0.0 mongodb-org-server=5.0.0 mongodb-org-shell=5.0.0 mongodb-org-mongos=5.0.0 mongodb-org-tools=5.0.0; \ + apt-get clean; \ + rm -rf /var/lib/apt/lists/*; From 7f585382925f4a40153221e4039268cf4be40c97 Mon Sep 17 00:00:00 2001 From: Mark Paluch Date: Tue, 21 Sep 2021 15:16:12 +0200 Subject: [PATCH 146/983] Use HTTPS in Dockerfiles for package download. 
See #3696 Original pull request: #3753. --- ci/openjdk11-mongodb-4.4/Dockerfile | 3 +++ ci/openjdk16-mongodb-4.4/Dockerfile | 5 ++++- ci/openjdk8-mongodb-4.0/Dockerfile | 3 +++ ci/openjdk8-mongodb-4.4/Dockerfile | 3 +++ ci/openjdk8-mongodb-5.0/Dockerfile | 5 ++++- 5 files changed, 17 insertions(+), 2 deletions(-) diff --git a/ci/openjdk11-mongodb-4.4/Dockerfile b/ci/openjdk11-mongodb-4.4/Dockerfile index 6c94ac38ff..7de227c4d9 100644 --- a/ci/openjdk11-mongodb-4.4/Dockerfile +++ b/ci/openjdk11-mongodb-4.4/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ diff --git a/ci/openjdk16-mongodb-4.4/Dockerfile b/ci/openjdk16-mongodb-4.4/Dockerfile index 7a1e47cf00..5f49272c4a 100644 --- a/ci/openjdk16-mongodb-4.4/Dockerfile +++ b/ci/openjdk16-mongodb-4.4/Dockerfile @@ -1,9 +1,12 @@ -FROM adoptopenjdk/openjdk16:latest +FROM adoptopenjdk/openjdk16:latest ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee 
/etc/apt/sources.list.d/mongodb-org-4.4.list; \ diff --git a/ci/openjdk8-mongodb-4.0/Dockerfile b/ci/openjdk8-mongodb-4.0/Dockerfile index e05068ab32..bb75ccfc14 100644 --- a/ci/openjdk8-mongodb-4.0/Dockerfile +++ b/ci/openjdk8-mongodb-4.0/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.0.list; \ diff --git a/ci/openjdk8-mongodb-4.4/Dockerfile b/ci/openjdk8-mongodb-4.4/Dockerfile index 79774dd269..f9a814533b 100644 --- a/ci/openjdk8-mongodb-4.4/Dockerfile +++ b/ci/openjdk8-mongodb-4.4/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 ; \ apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv 656408E390CFB1F5 ; \ echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu bionic/mongodb-org/4.4 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-4.4.list; \ diff --git a/ci/openjdk8-mongodb-5.0/Dockerfile b/ci/openjdk8-mongodb-5.0/Dockerfile index 658b615bbd..53509efd05 100644 --- a/ci/openjdk8-mongodb-5.0/Dockerfile +++ b/ci/openjdk8-mongodb-5.0/Dockerfile @@ -4,6 +4,9 @@ ENV TZ=Etc/UTC ENV DEBIAN_FRONTEND=noninteractive RUN set -eux; \ + sed -i -e 
's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list; \ + sed -i -e 's/http/https/g' /etc/apt/sources.list ; \ apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget ; \ # MongoDB 5.0 release signing key apt-key adv --keyserver hkps://keyserver.ubuntu.com:443 --recv B00A0BD1E2C63C11 ; \ @@ -12,6 +15,6 @@ RUN set -eux; \ echo ${TZ} > /etc/timezone; RUN apt-get update; \ - apt-get install -y mongodb-org=5.0.0 mongodb-org-server=5.0.0 mongodb-org-shell=5.0.0 mongodb-org-mongos=5.0.0 mongodb-org-tools=5.0.0; \ + apt-get install -y mongodb-org=5.0.3 mongodb-org-server=5.0.3 mongodb-org-shell=5.0.3 mongodb-org-mongos=5.0.3 mongodb-org-tools=5.0.3; \ apt-get clean; \ rm -rf /var/lib/apt/lists/*; From 9e2f6055a3917b8f9927859f28b389765eb2bd68 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Tue, 6 Jul 2021 10:40:10 +0200 Subject: [PATCH 147/983] Refine CI job triggers. See #3696 Original pull request: #3753. 
--- Jenkinsfile | 38 ++++++-------------------------------- 1 file changed, 6 insertions(+), 32 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 1ee5ed5c5f..a7e2d38bb9 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -83,8 +83,9 @@ pipeline { stage("test: baseline (jdk8)") { when { + beforeAgent(true) anyOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -113,8 +114,9 @@ pipeline { stage("Test other configurations") { when { + beforeAgent(true) allOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -219,8 +221,9 @@ pipeline { stage('Release to artifactory') { when { + beforeAgent(true) anyOf { - branch 'main' + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") not { triggeredBy 'UpstreamCause' } } } @@ -250,35 +253,6 @@ pipeline { } } } - - stage('Publish documentation') { - when { - branch 'main' - } - agent { - label 'data' - } - options { timeout(time: 20, unit: 'MINUTES') } - - environment { - ARTIFACTORY = credentials('02bd1690-b54f-4c9f-819d-a77cb7a9822c') - } - - steps { - script { - docker.withRegistry('', 'hub.docker.com-springbuildmaster') { - docker.image('adoptopenjdk/openjdk8:latest').inside('-v $HOME:/tmp/jenkins-home') { - sh 'MAVEN_OPTS="-Duser.name=jenkins -Duser.home=/tmp/jenkins-home" ./mvnw -s settings.xml -Pci,distribute ' + - '-Dartifactory.server=https://repo.spring.io ' + - "-Dartifactory.username=${ARTIFACTORY_USR} " + - "-Dartifactory.password=${ARTIFACTORY_PSW} " + - "-Dartifactory.distribution-repository=temp-private-local " + - '-Dmaven.test.skip=true clean deploy -U -B' - } - } - } - } - } } post { From 2f98a6656bb17b5f909a39d4bf5df8554864ce28 Mon Sep 17 00:00:00 2001 From: Christoph Strobl Date: Fri, 24 Sep 2021 10:35:38 +0200 Subject: [PATCH 148/983] Fix javadoc errors and warnings Closes: #3835 --- .../data/mongodb/BindableMongoExpression.java | 2 +- 
.../data/mongodb/MongoCollectionUtils.java | 4 +- .../data/mongodb/MongoDatabaseUtils.java | 10 +- .../data/mongodb/MongoExpression.java | 2 +- .../data/mongodb/MongoResourceHolder.java | 2 +- .../data/mongodb/MongoTransactionManager.java | 16 +- .../mongodb/ReactiveMongoDatabaseUtils.java | 10 +- .../mongodb/ReactiveMongoResourceHolder.java | 2 +- .../ReactiveMongoTransactionManager.java | 12 +- .../SessionAwareMethodInterceptor.java | 2 +- .../data/mongodb/SpringDataMongoDB.java | 2 +- .../config/MongoConfigurationSupport.java | 3 +- .../mongodb/core/ChangeStreamOptions.java | 4 +- .../data/mongodb/core/CollectionOptions.java | 2 +- .../mongodb/core/ExecutableFindOperation.java | 2 +- .../core/ExecutableUpdateOperation.java | 2 +- .../mongodb/core/FindAndReplaceOptions.java | 2 +- .../core/MongoDatabaseFactorySupport.java | 2 +- .../mongodb/core/MongoDbFactorySupport.java | 2 +- .../mongodb/core/MongoJsonSchemaCreator.java | 3 +- .../data/mongodb/core/MongoOperations.java | 129 +++++++------ .../data/mongodb/core/MongoTemplate.java | 6 +- .../core/ReactiveChangeStreamOperation.java | 2 +- .../mongodb/core/ReactiveFindOperation.java | 6 +- .../mongodb/core/ReactiveMongoOperations.java | 181 +++++++++--------- .../mongodb/core/ReactiveMongoTemplate.java | 6 +- .../mongodb/core/ReactiveSessionCallback.java | 2 +- .../mongodb/core/ReactiveSessionScoped.java | 4 +- .../data/mongodb/core/ScriptOperations.java | 6 +- .../data/mongodb/core/SessionCallback.java | 2 +- .../data/mongodb/core/SessionScoped.java | 6 +- .../mongodb/core/aggregation/Aggregation.java | 4 +- .../AggregationSpELExpression.java | 4 +- .../core/aggregation/AggregationUpdate.java | 3 +- .../core/aggregation/ArithmeticOperators.java | 34 ++-- .../core/aggregation/BucketAutoOperation.java | 6 +- .../core/aggregation/BucketOperation.java | 3 +- .../core/aggregation/ConvertOperators.java | 4 +- .../core/aggregation/CountOperation.java | 3 +- .../core/aggregation/DateOperators.java | 6 +- 
.../aggregation/GraphLookupOperation.java | 3 +- .../core/aggregation/GroupOperation.java | 2 +- ...DelegatingAggregationOperationContext.java | 2 +- .../core/aggregation/RedactOperation.java | 3 +- .../core/aggregation/ScriptOperators.java | 36 ++-- .../aggregation/SetWindowFieldsOperation.java | 3 +- .../aggregation/SortByCountOperation.java | 7 +- .../core/aggregation/UnionWithOperation.java | 2 +- .../core/convert/MappingMongoConverter.java | 2 +- .../mongodb/core/convert/QueryMapper.java | 2 +- .../data/mongodb/core/geo/GeoJsonModule.java | 2 +- .../mongodb/core/index/CompoundIndex.java | 2 +- .../mongodb/core/index/DurationStyle.java | 2 +- .../data/mongodb/core/index/HashIndexed.java | 2 +- .../mongodb/core/index/WildcardIndex.java | 4 +- .../data/mongodb/core/mapping/Encrypted.java | 4 +- .../data/mongodb/core/mapping/Field.java | 2 +- .../data/mongodb/core/mapping/FieldType.java | 2 +- .../data/mongodb/core/mapping/ShardKey.java | 2 +- .../data/mongodb/core/mapping/Sharded.java | 6 +- .../data/mongodb/core/mapping/TimeSeries.java | 3 +- .../data/mongodb/core/mapping/Unwrapped.java | 4 +- .../core/mapping/event/AfterDeleteEvent.java | 2 +- .../core/messaging/ChangeStreamRequest.java | 14 +- .../DefaultMessageListenerContainer.java | 2 +- .../messaging/MessageListenerContainer.java | 24 +-- .../mongodb/core/messaging/Subscription.java | 4 +- .../core/messaging/TailableCursorRequest.java | 6 +- .../data/mongodb/core/query/Collation.java | 2 +- .../data/mongodb/core/query/Criteria.java | 4 +- .../data/mongodb/core/query/NearQuery.java | 2 +- .../mongodb/core/schema/JsonSchemaObject.java | 2 +- .../mongodb/core/script/NamedMongoScript.java | 4 +- .../data/mongodb/repository/Aggregation.java | 26 +-- .../data/mongodb/repository/Query.java | 12 +- .../repository/query/MongoQueryCreator.java | 2 +- 76 files changed, 346 insertions(+), 360 deletions(-) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java index 982f683d53..ac735be37f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java @@ -31,7 +31,7 @@ * expression. The expression will be wrapped within { ... } if necessary. The actual parsing and parameter * binding of placeholders like {@code ?0} is delayed upon first call on the the target {@link Document} via * {@link #toDocument()}. - *

        + *
        * *

          * $toUpper : $name                -> { '$toUpper' : '$name' }
        diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
        index 3d85a33dcb..1b796eabd2 100644
        --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
        +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java
        @@ -20,8 +20,8 @@
         
         /**
          * Helper class featuring helper methods for working with MongoDb collections.
        - * 

        - *

        + *
        + *
        * Mainly intended for internal use within the framework. * * @author Thomas Risberg diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java index c9342ec4f6..f0b6c2228a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -30,7 +30,7 @@ * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining * {@link ClientSession session bound} resources, such as {@link MongoDatabase} and * {@link com.mongodb.client.MongoCollection} suitable for transactional usage. - *

        + *
        * Note: Intended for internal usage only. * * @author Christoph Strobl @@ -43,7 +43,7 @@ public class MongoDatabaseUtils { /** * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -56,7 +56,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory) { /** * Obtain the default {@link MongoDatabase database} form the given {@link MongoDatabaseFactory factory}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -71,7 +71,7 @@ public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSyn /** * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -85,7 +85,7 @@ public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFa /** * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDatabaseFactory factory}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java index 541118b114..2ea38af67f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java @@ -18,7 +18,7 @@ /** * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when * passed on to the driver. - *

        + *
        * A set of predefined {@link MongoExpression expressions}, including a * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method * like expressions (eg. {@code toUpper(name)}) are available via the diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java index 90a3b32023..157489e11c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java @@ -24,7 +24,7 @@ /** * MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}. * {@link MongoTransactionManager} binds instances of this class to the thread. - *

        + *
        * Note: Intended for internal usage only. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java index 1e6013d73d..d244da6296 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java @@ -37,18 +37,18 @@ /** * A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages * {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}. - *

        + *
        * Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread. - *

        + *
        * {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal * consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction() * commit} or {@link ClientSession#abortTransaction() abort} a transaction. - *

        + *
        * Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via * {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard * {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as * {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly. - *

        + *
        * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override * {@link #doCommit(MongoTransactionObject)} to implement the * Retry Commit Operation @@ -69,11 +69,11 @@ public class MongoTransactionManager extends AbstractPlatformTransactionManager /** * Create a new {@link MongoTransactionManager} for bean-style usage. - *

        + *
        * Note:The {@link MongoDatabaseFactory db factory} has to be * {@link #setDbFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. - *

        + *
        * Optionally it is possible to set default {@link TransactionOptions transaction options} defining * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. * @@ -212,8 +212,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio * By default those labels are ignored, nevertheless one might check for * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the * commit.
        + *

         	 * 
        -	 *     
         	 * int retries = 3;
         	 * do {
         	 *     try {
        @@ -226,8 +226,8 @@ protected final void doCommit(DefaultTransactionStatus status) throws Transactio
         	 *     }
         	 *     Thread.sleep(500);
         	 * } while (--retries > 0);
        -	 *     
        *
        + *
        * * @param transactionObject never {@literal null}. * @throws Exception in case of transaction errors. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java index 4699ac56c2..4ae9e227f1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -36,7 +36,7 @@ * Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for * obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection} * suitable for transactional usage. - *

        + *
        * Note: Intended for internal usage only. * * @author Mark Paluch @@ -75,7 +75,7 @@ public static Mono isTransactionActive(ReactiveMongoDatabaseFactory dat /** * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -88,7 +88,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto /** * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -104,7 +104,7 @@ public static Mono getDatabase(ReactiveMongoDatabaseFactory facto /** * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory * factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * @@ -119,7 +119,7 @@ public static Mono getDatabase(String dbName, ReactiveMongoDataba /** * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory * factory}. - *

        + *
        * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java index b1f1c06d08..b3338fd7ba 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java @@ -24,7 +24,7 @@ /** * MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds * instances of this class to the subscriber context. - *

        + *
        * Note: Intended for internal usage only. * * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java index 63706eff8a..711af76f53 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java @@ -38,21 +38,21 @@ * A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages * {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}. - *

        + *
        * Binds a {@link ClientSession} from the specified * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber * {@link reactor.util.context.Context}. - *

        + *
        * {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a * {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start}, * {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or * {@link ClientSession#abortTransaction() abort} a transaction. - *

        + *
        * Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead * of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring * classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly. - *

        + *
        * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override * {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the * Retry Commit Operation @@ -71,11 +71,11 @@ public class ReactiveMongoTransactionManager extends AbstractReactiveTransaction /** * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage. - *

        + *
        * Note:The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory)} set} before using the instance. Use this constructor * to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. - *

        + *
        * Optionally it is possible to set default {@link TransactionOptions transaction options} defining * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java index da48f22154..b9b2c88130 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java @@ -35,7 +35,7 @@ /** * {@link MethodInterceptor} implementation looking up and invoking an alternative target method having * {@link ClientSession} as its first argument. This allows seamless integration with the existing code base. - *

        + *
        * The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself * like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them * if not already proxied. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java index dbbf146fc1..808b576bcb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java @@ -48,7 +48,7 @@ public static MongoDriverInformation driverInformation() { /** * Fetches the "Implementation-Version" manifest attribute from the jar file. - *

        + *
        * Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the * version in all environments. In this case the current Major version is returned as a fallback. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java index 52ec72d171..5fe0c4fe4e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -172,8 +172,7 @@ protected Set> scanForEntities(String basePackage) throws ClassNotFound /** * Configures whether to abbreviate field names for domain objects by configuring a - * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced - * customization needs, consider overriding {@link #mappingMongoConverter()}. + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. * * @return */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java index a4f6f7e226..3fe6767533 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java @@ -242,13 +242,13 @@ public ChangeStreamOptionsBuilder collation(Collation collation) { /** * Set the filter to apply. - *

        + *
        * Fields on aggregation expression root level are prefixed to map to fields contained in * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns}, * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken * as given, during the mapping procedure. You may want to have a look at the * structure of Change Events. - *

        + *
        * Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are * mapped to domain type fields. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index edff52bb74..f866896694 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -428,7 +428,7 @@ public Optional getValidationLevel() { /** * Get the {@code validationAction} to perform. * - * @return @return {@link Optional#empty()} if not set. + * @return {@link Optional#empty()} if not set. */ public Optional getValidationAction() { return Optional.ofNullable(validationAction); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java index d67212bdc6..f41af5c6c5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java @@ -125,7 +125,7 @@ default Optional first() { /** * Get the number of matching elements. - *

        + *
        * This method uses an {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) aggregation * execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees shard, * session and transaction compliance. In case an inaccurate count satisfies the applications needs use diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java index a8b58669e3..32b7017e41 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java @@ -89,7 +89,7 @@ default Optional findAndModify() { /** * Trigger - * findOneAndReplace + * findOneAndReplace * execution by calling one of the terminating methods. * * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java index 6122837a27..42a8a3ef77 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java @@ -17,7 +17,7 @@ /** * Options for - * findOneAndReplace. + * findOneAndReplace. *
        * Defaults to *

        diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java index dac4b0d6d7..9c8419a154 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java @@ -33,7 +33,7 @@ /** * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as * database name and exception translator. - *

        + *
        * Not intended to be used directly. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java index bc0e39bbc9..ba530d502f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java @@ -20,7 +20,7 @@ /** * Common base class for usage with both {@link com.mongodb.client.MongoClients} defining common properties such as * database name and exception translator. - *

        + *
        * Not intended to be used directly. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java index 5e5bc50644..f5b620d0fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -40,6 +40,7 @@ * following mapping rules. *

        * Required Properties + *

        *
          *
        • Properties of primitive type
        • *
        @@ -61,7 +62,7 @@ * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. - *

        + * {@link Encrypted} properties will contain {@literal encrypt} information. * * @author Christoph Strobl diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java index e4a4b0868f..c015fb5a49 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java @@ -58,7 +58,7 @@ * Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but * a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK * proxy). - *

        + *
        * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB * specific documentation to learn more about Multi * Document Transactions. @@ -125,7 +125,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Executes a {@link DbCallback} translating any exceptions as necessary. - *

        + *
        * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -138,7 +138,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Executes the given {@link CollectionCallback} on the entity collection of the specified class. - *

        + *
        * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -151,7 +151,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Executes the given {@link CollectionCallback} on the collection of the given name. - *

        + *
        * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -176,7 +176,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *

        + *
        * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use the * {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}. * @@ -212,7 +212,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}. - *

        + *
        * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * * @param session must not be {@literal null}. @@ -300,7 +300,7 @@ public T execute(SessionCallback action, Consumer onComple * is created on first interaction with the server. Collections can be explicitly created via * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) * exists} first. - *

        + *
        * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -310,7 +310,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Check to see if a collection with a name indicated by the entity class exists. - *

        + *
        * Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -320,7 +320,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Check to see if a collection with a given name exists. - *

        + *
        * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -330,7 +330,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Drop the collection with the name indicated by the entity class. - *

        + *
        * Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -339,7 +339,7 @@ public T execute(SessionCallback action, Consumer onComple /** * Drop the collection with the given name. - *

        + *
        * Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -403,10 +403,10 @@ public T execute(SessionCallback action, Consumer onComple /** * Query for a list of objects of type T from the collection used by the entity class. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -417,10 +417,10 @@ public T execute(SessionCallback action, Consumer onComple /** * Query for a list of objects of type T from the specified collection. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -539,11 +539,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

        + *
        * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that * needs to be closed. The raw results will be mapped to the given entity class and are returned as stream. The name * of the inputCollection is derived from the inputType of the aggregation. - *

        + *
        * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -557,10 +557,10 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

        + *
        * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that * needs to be closed. The raw results will be mapped to the given entity class. - *

        + *
        * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -576,10 +576,10 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN /** * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. - *

        + *
        * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.client.AggregateIterable} that * needs to be closed. The raw results will be mapped to the given entity class. - *

        + *
        * Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -702,10 +702,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the * specified type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -720,10 +720,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -768,10 +768,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -784,10 +784,10 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Map the results of an ad-hoc query on the specified collection to a List of the specified type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -881,7 +881,7 @@ default List findDistinct(Query query, String field, String collection, C } /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -897,7 +897,7 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -914,7 +914,7 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. * @@ -934,7 +934,7 @@ default List findDistinct(Query query, String field, String collection, C T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. 
* @@ -957,7 +957,7 @@ T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
        * The collection name is derived from the {@literal replacement} type.
        @@ -977,7 +977,7 @@ default T findAndReplace(Query query, T replacement) { /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
        * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
        @@ -997,7 +997,7 @@ default T findAndReplace(Query query, T replacement, String collectionName) /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -1018,7 +1018,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -1041,7 +1041,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -1066,7 +1066,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -1094,7 +1094,7 @@ default T findAndReplace(Query query, S replacement, FindAndReplaceOption /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -1120,9 +1120,9 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the * database. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1137,10 +1137,10 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1160,7 +1160,7 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

        + *
        * This method uses an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1182,7 +1182,7 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

        + *
        * This method uses an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1199,7 +1199,7 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option /** * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, * based on collection statistics. - *

        + *
        * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1215,7 +1215,7 @@ default long estimatedCount(Class entityClass) { /** * Estimate the number of documents in the given collection based on collection statistics. - *

        + *
        * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1232,7 +1232,7 @@ default long estimatedCount(Class entityClass) { * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

        + *
        * This method uses an * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1249,17 +1249,17 @@ default long estimatedCount(Class entityClass) { /** * Insert the object into the collection for the entity type of the object to save. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

        + *
        * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

        + *
        * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1270,12 +1270,12 @@ default long estimatedCount(Class entityClass) { /** * Insert the object into the specified collection. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

        + *
        * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1315,16 +1315,16 @@ default long estimatedCount(Class entityClass) { /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the * object is not already present, that is an 'upsert'. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

        + *
        * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1336,16 +1336,15 @@ default long estimatedCount(Class entityClass) { /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that * is an 'upsert'. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. - *

        + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. + *
        * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index fb0780c5c8..b3fb915687 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -338,7 +338,7 @@ public void setApplicationContext(ApplicationContext applicationContext) throws /** * Set the {@link EntityCallbacks} instance to use when invoking * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}. - *

        + *
        * Overrides potentially existing {@link EntityCallbacks}. * * @param entityCallbacks must not be {@literal null}. @@ -2664,7 +2664,7 @@ Document getMappedValidator(Validator validator, Class domainType) { /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. * The first document that matches the query is returned and also removed from the collection in the database. - *

        + *
        * The query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from @@ -3493,7 +3493,7 @@ public MongoDatabaseFactory getMongoDatabaseFactory() { /** * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the * server through the driver API. - *

        + *
        * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java index 279f4184fb..d834af4b32 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java @@ -71,7 +71,7 @@ interface TerminatingChangeStream { /** * Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription} * is {@link org.reactivestreams.Subscription#cancel() canceled}. - *

        + *
        * However, the stream may become dead, or invalid, if all watched collections, databases are dropped. */ Flux> listen(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java index 9a65090922..b06623197d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java @@ -91,10 +91,10 @@ interface TerminatingFind { * Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will * not be completed unless the {@link org.reactivestreams.Subscription} is * {@link org.reactivestreams.Subscription#cancel() canceled}. - *

        + *
        * However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the * document at the "end" of the collection and then the application deletes that document. - *

        + *
        * A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the * streams will linger and exhaust resources.
        * NOTE: Requires a capped collection. @@ -106,7 +106,7 @@ interface TerminatingFind { /** * Get the number of matching elements. - *

        + *
        * This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java index fb1c260305..0f54bef685 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java @@ -59,7 +59,7 @@ * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}. - *

        + *
        * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB * specific documentation to learn more about Multi * Document Transactions. @@ -121,7 +121,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. - *

        + *
        * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -133,7 +133,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. - *

        + *
        * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -145,7 +145,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. - *

        + *
        * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -159,7 +159,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *

        + *
        * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -178,7 +178,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} * with given {@literal sessionOptions} to each and every command issued against MongoDB. - *

        + *
        * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -192,7 +192,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the * {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB. - *

        + *
        * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -205,7 +205,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}. - *

        + *
        * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * * @param session must not be {@literal null}. @@ -218,7 +218,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide * Initiate a new {@link ClientSession} and obtain a {@link ClientSession session} bound instance of * {@link ReactiveSessionScoped}. Starts the transaction and adds the {@link ClientSession} to each and every command * issued against MongoDB. - *

        + *
        * Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are * {@link ClientSession#abortTransaction() rolled back} upon errors. @@ -233,7 +233,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped}, start the transaction and * bind the {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against * MongoDB. - *

        + *
        * Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are * {@link ClientSession#abortTransaction() rolled back} upon errors. @@ -293,7 +293,7 @@ Mono> createCollection(Class entityClass, * created on first interaction with the server. Collections can be explicitly created via * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) * exists} first. - *

        + *
        * Translate any exceptions as necessary. * * @param collectionName name of the collection. @@ -303,7 +303,7 @@ Mono> createCollection(Class entityClass, /** * Check to see if a collection with a name indicated by the entity class exists. - *

        + *
        * Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -313,7 +313,7 @@ Mono> createCollection(Class entityClass, /** * Check to see if a collection with a given name exists. - *

        + *
        * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -323,7 +323,7 @@ Mono> createCollection(Class entityClass, /** * Drop the collection with the name indicated by the entity class. - *

        + *
        * Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -332,7 +332,7 @@ Mono> createCollection(Class entityClass, /** * Drop the collection with the given name. - *

        + *
        * Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -341,10 +341,10 @@ Mono> createCollection(Class entityClass, /** * Query for a {@link Flux} of objects of type T from the collection used by the entity class. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -355,10 +355,10 @@ Mono> createCollection(Class entityClass, /** * Query for a {@link Flux} of objects of type T from the specified collection. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -371,10 +371,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the * specified type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -388,10 +388,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -435,10 +435,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -451,10 +451,10 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -566,10 +566,10 @@ default Flux findDistinct(Query query, String field, String collection, C /** * Execute an aggregation operation. - *

        + *
        * The raw results will be mapped to the given entity class and are returned as stream. The name of the * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}. - *

        + *
        * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -584,10 +584,10 @@ default Flux findDistinct(Query query, String field, String collection, C /** * Execute an aggregation operation. - *

        + *
        * The raw results will be mapped to the given {@code outputType}. The name of the inputCollection is derived from the * {@code inputType}. - *

        + *
        * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -604,9 +604,9 @@ default Flux findDistinct(Query query, String field, String collection, C /** * Execute an aggregation operation. - *

        + *
        * The raw results will be mapped to the given entity class. - *

        + *
        * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -676,7 +676,7 @@ default Flux findDistinct(Query query, String field, String collection, C Flux> geoNear(NearQuery near, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -691,7 +691,7 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. * * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional @@ -707,7 +707,7 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. * @@ -725,7 +725,7 @@ default Flux findDistinct(Query query, String field, String collection, C Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. 
* @@ -746,7 +746,7 @@ Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
        * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
        @@ -764,7 +764,7 @@ default Mono findAndReplace(Query query, T replacement) { /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
        * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
        @@ -783,7 +783,7 @@ default Mono findAndReplace(Query query, T replacement, String collection /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -803,7 +803,7 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -825,7 +825,7 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -849,7 +849,7 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -876,7 +876,7 @@ default Mono findAndReplace(Query query, S replacement, FindAndReplace /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
        * NOTE: The replacement entity must not hold an {@literal id}. @@ -902,9 +902,9 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the * database. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -918,10 +918,10 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -940,7 +940,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

        + *
        * This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -962,7 +962,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

        + *
        * This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -983,7 +983,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * influence on the resulting number of documents found as those values are passed on to the server and potentially * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to * count all matches. - *

        + *
        * This method uses an * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees @@ -1001,7 +1001,7 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, * based on collection statistics. - *

        + *
        * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1017,7 +1017,7 @@ default Mono estimatedCount(Class entityClass) { /** * Estimate the number of documents in the given collection based on collection statistics. - *

        + *
        * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside * transactions. * @@ -1029,17 +1029,17 @@ default Mono estimatedCount(Class entityClass) { /** * Insert the object into the collection for the entity type of the object to save. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

        + *
        * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

        + *
        * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1050,12 +1050,12 @@ default Mono estimatedCount(Class entityClass) { /** * Insert the object into the specified collection. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * Insert is used to initially store the object into the database. To update an existing object use the save method. - *

        + *
        * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1094,15 +1094,15 @@ default Mono estimatedCount(Class entityClass) { /** * Insert the object into the collection for the entity type of the object to save. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

        + *
        * Insert is used to initially store the object into the database. To update an existing object use the save method. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1140,16 +1140,16 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the * object is not already present, that is an 'upsert'. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See * Spring's * Type Conversion" for more details. - *

        + *
        * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. @@ -1161,15 +1161,14 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that * is an 'upsert'. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -1181,15 +1180,14 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the * object is not already present, that is an 'upsert'. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's - * Type Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. @@ -1199,17 +1197,16 @@ default Mono estimatedCount(Class entityClass) { /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that * is an 'upsert'. - *

        + *
        * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. + * See Spring's Type Conversion for more details. * - * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. */ @@ -1481,10 +1478,10 @@ default Mono estimatedCount(Class entityClass) { * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1500,10 +1497,10 @@ default Mono estimatedCount(Class entityClass) { * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

        + *
        * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

        + *
        * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * @@ -1520,10 +1517,10 @@ default Mono estimatedCount(Class entityClass) { * the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to * filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

        + *
        * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

        + *
        * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken} * for resuming change streams. * @@ -1544,10 +1541,10 @@ default Flux> changeStream(ChangeStreamOptions options, * the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter * events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is * {@link Subscription#cancel() canceled}. - *

        + *
        * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

        + *
        * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken} * for resuming change streams. * @@ -1569,10 +1566,10 @@ default Flux> changeStream(@Nullable String collectionN * Subscribe to a MongoDB Change Stream via the reactive * infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}. - *

        + *
        * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

        + *
        * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumeToken} * for resuming change streams. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 82a3d12260..a7d0113e8f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -362,7 +362,7 @@ public void setApplicationContext(ApplicationContext applicationContext) throws * Set the {@link ReactiveEntityCallbacks} instance to use when invoking * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the * {@link ReactiveBeforeSaveCallback}. - *

        + *
        * Overrides potentially existing {@link ReactiveEntityCallbacks}. * * @param entityCallbacks must not be {@literal null}. @@ -2537,7 +2537,7 @@ private Document getMappedValidator(Validator validator, Class domainType) { /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. * The first document that matches the query is returned and also removed from the collection in the database. - *

        + *
        * The query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from @@ -3390,7 +3390,7 @@ private static List toDocuments(Collection + *
        * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java index c9b15324fc..8ac447eeb5 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java @@ -32,7 +32,7 @@ public interface ReactiveSessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is * inferred directly into the operation so that no further interaction is necessary. - *

        + *
        * Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and * others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway * objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java index 17c17edd24..2519a8bb1a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java @@ -33,7 +33,7 @@ public interface ReactiveSessionScoped { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

        + *
        * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -47,7 +47,7 @@ default Flux execute(ReactiveSessionCallback action) { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

        + *
        * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java index 24ad1c5ffc..36f8113021 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java @@ -23,7 +23,7 @@ /** - * Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions. + * Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions. * * @author Christoph Strobl * @author Oliver Gierke @@ -72,10 +72,10 @@ public interface ScriptOperations { Object call(String scriptName, Object... args); /** - * Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name. + * Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name. * * @param scriptName must not be {@literal null} or empty. - * @return false if no {@link ServerSideJavaScript} with given name exists. + * @return false if no {@literal ServerSideJavaScript} with given name exists. 
*/ boolean exists(String scriptName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java index c12d4b1005..93d0c71378 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java @@ -31,7 +31,7 @@ public interface SessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link MongoOperations}. The session is inferred * directly into the operation so that no further interaction is necessary. - *

        + *
        * Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others * are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like * {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via eg. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java index ead52ee15e..5bba65144a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java @@ -23,7 +23,7 @@ /** * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}. - *

        + *
        * The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance. * * @author Christoph Strobl @@ -34,7 +34,7 @@ public interface SessionScoped { /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

        + *
        * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -49,7 +49,7 @@ default T execute(SessionCallback action) { /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

        + *
        * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 614489692c..e4894fbef0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -227,7 +227,7 @@ public static String previousOperation() { /** * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}. - *

        + *
        * Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is * an alias for {@code $addFields}. * @@ -726,7 +726,7 @@ public AggregationPipeline getPipeline() { /** * Converts this {@link Aggregation} specification to a {@link Document}. - *

        + *
        * MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render * an aggregation pipeline. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java index 14fa8c48d1..e406f57874 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java @@ -24,15 +24,15 @@ * expression.
        *
        * Samples:
        - * *

        + * 
          * // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
          * expressionOf("qty > 100 && qty < 250);
          *
          * // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
          * expressionOf("cond(a >= 42, 'answer', 'no-answer')");
        - * 
        * + *
        * * @author Christoph Strobl * @author Mark Paluch diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java index e69531e036..3cbb5f8735 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java @@ -71,8 +71,7 @@ * * @author Christoph Strobl * @author Mark Paluch - * @see MongoDB + * @see MongoDB * Reference Documentation * @since 3.0 */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 9c9132e679..0fbfcac411 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -825,7 +825,7 @@ public ATan atan() { * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by * the given numeric value in the argument. * - * @param the numeric value + * @param value the numeric value * @return new instance of {@link ATan2}. * @since 3.3 */ @@ -839,7 +839,7 @@ public ATan2 atan2(Number value) { * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by * the given field reference in the argument. * - * @param the numeric value + * @param fieldReference the numeric value * @return new instance of {@link ATan2}. 
* @since 3.3 */ @@ -853,7 +853,7 @@ public ATan2 atan2(String fieldReference) { * Creates new {@link AggregationExpression} that calculates the inverse tangent of the the numeric value divided by * the given {@link AggregationExpression} in the argument. * - * @param the numeric value + * @param expression the expression evaluating to a numeric value * @return new instance of {@link ATan2}. * @since 3.3 */ @@ -2169,7 +2169,7 @@ private Sin(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in * {@link AngularUnit#RADIANS radians}. - *

        + *
        * Use {@code sinhOf("angle", DEGREES)} as shortcut for * *

        @@ -2282,7 +2282,7 @@ public static Sinh sinhOf(String fieldReference) {
         		/**
         		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in
         		 * the given {@link AngularUnit unit}.
        -		 * 

        + *
        * Use {@code sinhOf("angle", DEGREES)} as shortcut for * *

        @@ -2302,7 +2302,7 @@ public static Sinh sinhOf(String fieldReference, AngularUnit unit) {
         		/**
         		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in
         		 * {@link AngularUnit#RADIANS}.
        -		 * 

        + *
        * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * @@ -2386,7 +2386,7 @@ public static ASin asinOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. - *

        + *
        * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ASin}. @@ -2436,7 +2436,7 @@ public static ASinh asinhOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. - *

        + *
        * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ASinh}. @@ -2478,7 +2478,7 @@ private Cos(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in * {@link AngularUnit#RADIANS radians}. - *

        + *
        * Use {@code cosOf("angle", DEGREES)} as shortcut for * *

        @@ -2589,7 +2589,7 @@ public static Cosh coshOf(String fieldReference) {
         		/**
         		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in
         		 * the given {@link AngularUnit unit}.
        -		 * 

        + *
        * Use {@code coshOf("angle", DEGREES)} as shortcut for * *

        @@ -2607,7 +2607,7 @@ public static Cosh coshOf(String fieldReference, AngularUnit unit) {
         		/**
         		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in
         		 * {@link AngularUnit#RADIANS}.
        -		 * 

        + *
        * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * @@ -2680,7 +2680,7 @@ private Tan(Object value) { /** * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in * {@link AngularUnit#RADIANS radians}. - *

        + *
        * Use {@code tanOf("angle", DEGREES)} as shortcut for * *

        @@ -2859,7 +2859,7 @@ public static ATan2 valueOf(AggregationExpression expression) {
         		 * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are
         		 * the first and second values passed to the expression respectively.
         		 *
        -		 * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
        +		 * @param fieldReference anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
         		 *          numeric value.
         		 * @return new instance of {@link ATan2}.
         		 */
        @@ -2873,7 +2873,7 @@ public ATan2 atan2of(String fieldReference) {
         		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in
         		 * {@link AngularUnit#RADIANS}.
         		 *
        -		 * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
        +		 * @param expression anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a
         		 *          numeric value.
         		 * @return new instance of {@link ATan2}.
         		 */
        @@ -2927,7 +2927,7 @@ public static Tanh tanhOf(String fieldReference) {
         		/**
         		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in
         		 * the given {@link AngularUnit unit}.
        -		 * 

        + *
        * Use {@code tanhOf("angle", DEGREES)} as shortcut for * *

        @@ -2945,7 +2945,7 @@ public static Tanh tanhOf(String fieldReference, AngularUnit unit) {
         		/**
         		 * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in
         		 * {@link AngularUnit#RADIANS}.
        -		 * 

        + *
        * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. * @@ -3029,7 +3029,7 @@ public static ATanh atanhOf(String fieldReference) { /** * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. - *

        + *
        * * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. * @return new instance of {@link ATanh}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java index 235c16befe..f5755346bb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java @@ -28,8 +28,7 @@ * We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating * instances of this class directly. * - * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ * @see BucketOperationSupport * @author Mark Paluch * @author Christoph Strobl @@ -248,8 +247,7 @@ public interface Granularity { /** * Supported MongoDB granularities. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity * @author Mark Paluch */ public enum Granularities implements Granularity { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java index 173fa4ece5..937ec029cb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java @@ -31,8 +31,7 @@ * We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of * this class directly. 
* - * @see https://docs.mongodb.org/manual/reference/aggregation/bucket/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucket/ * @see BucketOperationSupport * @author Mark Paluch * @since 1.10 diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java index 637ebd8d8f..3555ada8a7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -328,9 +328,9 @@ public Convert to(String stringTypeIdentifier) { *

        1
        *
        double
        *
        2
        - *
        string + *
        string
        *
        7
        - *
        objectId + *
        objectId
        *
        8
        *
        bool
        *
        9
        diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java index e2b65aa7ff..95e63ac8f7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java @@ -24,8 +24,7 @@ * We recommend to use the static factory method {@link Aggregation#count()} instead of creating instances of this class * directly. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ * @author Mark Paluch * @since 1.10 */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index 029b994f2e..d1e45a8b93 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -92,7 +92,7 @@ public static DateOperatorFactory zonedDateOf(AggregationExpression expression, /** * Take the given value as date. - *

        + *
        * This can be one of: *