diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 6c5d559a8a..e8f632af23 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -6,7 +6,6 @@ Make sure that: --> - [ ] You have read the [Spring Data contribution guidelines](https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc). -- [ ] There is a ticket in the bug tracker for the project in our [JIRA](https://jira.spring.io/browse/DATAMONGO). - [ ] You use the code formatters provided [here](https://github.com/spring-projects/spring-data-build/tree/master/etc/ide) and have them applied to your changes. Don’t submit any formatting related changes. - [ ] You submit test cases (unit or integration tests) that back your changes. - [ ] You added yourself as author in the headers of the classes you touched. Amend the date range in the Apache license header if needed. For new types, add the license header (copy from another file and set the current year only). 
diff --git a/.github/dco.yml b/.github/dco.yml new file mode 100644 index 0000000000..0c4b142e9a --- /dev/null +++ b/.github/dco.yml @@ -0,0 +1,2 @@ +require: + members: false diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml new file mode 100644 index 0000000000..a5f764579a --- /dev/null +++ b/.github/workflows/project.yml @@ -0,0 +1,40 @@ +# GitHub Actions to automate GitHub issues for Spring Data Project Management + +name: Spring Data GitHub Issues + +on: + issues: + types: [opened, edited, reopened] + issue_comment: + types: [created] + pull_request_target: + types: [opened, edited, reopened] + +jobs: + Inbox: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request == null && !contains(join(github.event.issue.labels.*.name, ', '), 'dependency-upgrade') && !contains(github.event.issue.title, 'Release ') + steps: + - name: Create or Update Issue Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Pull-Request: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && (github.event.action == 'opened' || github.event.action == 'reopened') && github.event.pull_request != null + steps: + - name: Create or Update Pull Request Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} + Feedback-Provided: + runs-on: ubuntu-latest + if: github.repository_owner == 'spring-projects' && github.event_name == 'issue_comment' && github.event.action == 'created' && github.actor != 'spring-projects-issues' && github.event.pull_request == null && github.event.issue.state == 'open' && contains(toJSON(github.event.issue.labels), 'waiting-for-feedback') + steps: + - name: 
Update Project Card + uses: actions/add-to-project@v1.0.2 + with: + project-url: https://github.com/orgs/spring-projects/projects/25 + github-token: ${{ secrets.GH_ISSUES_TOKEN_SPRING_DATA }} diff --git a/.gitignore b/.gitignore index be372b6209..27b7a78896 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,9 @@ src/ant/.ant-targets-upload-dist.xml atlassian-ide-plugin.xml /.gradle/ /.idea/ -*.graphml \ No newline at end of file +*.graphml +build/ +node_modules +node +package-lock.json +.mvn/.develocity diff --git a/.mvn/extensions.xml b/.mvn/extensions.xml new file mode 100644 index 0000000000..e0857eaa25 --- /dev/null +++ b/.mvn/extensions.xml @@ -0,0 +1,8 @@ + + + + io.spring.develocity.conventions + develocity-conventions-maven-extension + 0.0.22 + + diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 0000000000..e27f6e8f5e --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1,14 @@ +--add-exports jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED +--add-exports jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED +--add-opens jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED +--add-opens=java.base/java.util=ALL-UNNAMED +--add-opens=java.base/java.lang.reflect=ALL-UNNAMED +--add-opens=java.base/java.text=ALL-UNNAMED +--add-opens=java.desktop/java.awt.font=ALL-UNNAMED diff --git a/.mvn/wrapper/maven-wrapper.jar b/.mvn/wrapper/maven-wrapper.jar new file mode 100755 index 0000000000..01e6799737 Binary files /dev/null and b/.mvn/wrapper/maven-wrapper.jar differ diff --git 
a/.mvn/wrapper/maven-wrapper.properties b/.mvn/wrapper/maven-wrapper.properties new file mode 100755 index 0000000000..5f3193b363 --- /dev/null +++ b/.mvn/wrapper/maven-wrapper.properties @@ -0,0 +1,2 @@ +#Thu Nov 07 09:47:19 CET 2024 +distributionUrl=https\://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.9/apache-maven-3.9.9-bin.zip diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 604395caa9..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,35 +0,0 @@ -language: java - -jdk: - - oraclejdk8 - -before_install: - - mkdir -p downloads - - mkdir -p var/db var/log - - if [[ ! -d downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION} ]] ; then cd downloads && wget https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}.tgz && tar xzf mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}.tgz && cd ..; fi - - downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongod --version - - downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongod --dbpath var/db --replSet rs0 --fork --logpath var/log/mongod.log - - sleep 10 - - |- - downloads/mongodb-linux-x86_64-ubuntu1604-${MONGO_VERSION}/bin/mongo --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});" - sleep 15 - -env: - matrix: - - PROFILE=ci - global: - - MONGO_VERSION=4.0.0 - -addons: - apt: - packages: - - oracle-java8-installer - -sudo: false - -cache: - directories: - - $HOME/.m2 - - downloads - -script: "mvn clean dependency:list test -P${PROFILE} -Dsort" diff --git a/CI.adoc b/CI.adoc new file mode 100644 index 0000000000..057100a955 --- /dev/null +++ b/CI.adoc @@ -0,0 +1,43 @@ += Continuous Integration + +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Moore%20(main)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] 
+image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F2.1.x&subject=Lovelace%20(2.1.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] +image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2F1.10.x&subject=Ingalls%20(1.10.x)[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] + +== Running CI tasks locally + +Since this pipeline is purely Docker-based, it's easy to: + +* Debug what went wrong on your local machine. +* Test out a tweak to your test routine before sending it out. +* Experiment against a new image before submitting your pull request. + +All of these use cases are great reasons to essentially run what the CI server does on your local machine. + +IMPORTANT: To do this you must have Docker installed on your machine. + +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. ++ +2. `cd spring-data-mongodb-github` ++ +Next, run the tests from inside the container: ++ +3. `./mvnw clean dependency:list test -Dsort -Dbundlor.enabled=false -B` (or with whatever profile you need to test out) + +Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs. + +If you need to package things up, do this: + +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3:latest /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. ++ +2. `cd spring-data-mongodb-github` ++ +Next, package things from inside the container doing this: ++ +3. `./mvnw clean dependency:list package -Dsort -Dbundlor.enabled=false -B` + +NOTE: Docker containers can eat up disk space fast! 
From time to time, run `docker system prune` to clean out old images. diff --git a/CODE_OF_CONDUCT.adoc b/CODE_OF_CONDUCT.adoc deleted file mode 100644 index f64fb1b7a5..0000000000 --- a/CODE_OF_CONDUCT.adoc +++ /dev/null @@ -1,27 +0,0 @@ -= Contributor Code of Conduct - -As contributors and maintainers of this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. - -We are committed to making participation in this project a harassment-free experience for everyone, regardless of level of experience, gender, gender identity and expression, sexual orientation, disability, personal appearance, body size, race, ethnicity, age, religion, or nationality. - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, such as physical or electronic addresses, - without explicit permission -* Other unethical or unprofessional conduct - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. - -By adopting this Code of Conduct, project maintainers commit themselves to fairly and consistently applying these principles to every aspect of managing this project. Project maintainers who do not follow or enforce the Code of Conduct may be permanently removed from the project team. - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. 
- -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting a project maintainer at spring-code-of-conduct@pivotal.io. -All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. -Maintainers are obligated to maintain confidentiality with regard to the reporter of an incident. - -This Code of Conduct is adapted from the http://contributor-covenant.org[Contributor Covenant], version 1.3.0, available at http://contributor-covenant.org/version/1/3/0/[contributor-covenant.org/version/1/3/0/]. \ No newline at end of file diff --git a/CONTRIBUTING.adoc b/CONTRIBUTING.adoc index f007591467..740e8bd0bb 100644 --- a/CONTRIBUTING.adoc +++ b/CONTRIBUTING.adoc @@ -1,3 +1,3 @@ = Spring Data contribution guidelines -You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/master/CONTRIBUTING.adoc[here]. +You find the contribution guidelines for Spring Data projects https://github.com/spring-projects/spring-data-build/blob/main/CONTRIBUTING.adoc[here]. 
diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000000..ce2f272334 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,176 @@ +def p = [:] +node { + checkout scm + p = readProperties interpolate: true, file: 'ci/pipeline.properties' +} + +pipeline { + agent none + + triggers { + pollSCM 'H/10 * * * *' + upstream(upstreamProjects: "spring-data-commons/main", threshold: hudson.model.Result.SUCCESS) + } + + options { + disableConcurrentBuilds() + buildDiscarder(logRotator(numToKeepStr: '14')) + } + + stages { + stage("Docker images") { + parallel { + stage('Publish JDK (Java 24) + MongoDB 8.0') { + when { + anyOf { + changeset "ci/openjdk24-mongodb-8.0/**" + changeset "ci/pipeline.properties" + } + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } + + steps { + script { + def image = docker.build("springci/spring-data-with-mongodb-8.0:${p['java.main.tag']}", "--build-arg BASE=${p['docker.java.main.image']} --build-arg MONGODB=${p['docker.mongodb.8.0.version']} ci/openjdk24-mongodb-8.0/") + docker.withRegistry(p['docker.registry'], p['docker.credentials']) { + image.push() + } + } + } + } + stage('Publish JDK (Java.next) + MongoDB 8.0') { + when { + anyOf { + changeset "ci/openjdk24-mongodb-8.0/**" + changeset "ci/pipeline.properties" + } + } + agent { label 'data' } + options { timeout(time: 30, unit: 'MINUTES') } + + steps { + script { + def image = docker.build("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}", "--build-arg BASE=${p['docker.java.next.image']} --build-arg MONGODB=${p['docker.mongodb.8.0.version']} ci/openjdk24-mongodb-8.0/") + docker.withRegistry(p['docker.registry'], p['docker.credentials']) { + image.push() + } + } + } + } + } + } + + stage("test: baseline (main)") { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + 
environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-8.0:${p['java.main.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + + stage("Test other configurations") { + when { + beforeAgent(true) + allOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + parallel { + + stage("test: MongoDB 8.0") { + agent { + label 'data' + } + options { timeout(time: 30, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image("springci/spring-data-with-mongodb-8.0:${p['java.next.tag']}").inside(p['docker.java.inside.docker']) { + sh 'ci/start-replica.sh' + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root -Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb clean dependency:list test -Dsort -U -B" + } + } + } + } + } + } + } + + stage('Release to artifactory') { + when { + beforeAgent(true) + anyOf { + branch(pattern: "main|(\\d\\.\\d\\.x)", comparator: "REGEXP") + not { triggeredBy 'UpstreamCause' } + } + } + agent { + label 'data' + } + options { timeout(time: 
20, unit: 'MINUTES') } + environment { + ARTIFACTORY = credentials("${p['artifactory.credentials']}") + DEVELOCITY_ACCESS_KEY = credentials("${p['develocity.access-key']}") + } + steps { + script { + docker.withRegistry(p['docker.proxy.registry'], p['docker.proxy.credentials']) { + docker.image(p['docker.java.main.image']).inside(p['docker.java.inside.docker']) { + sh 'MAVEN_OPTS="-Duser.name=' + "${p['jenkins.user.name']}" + ' -Duser.home=/tmp/jenkins-home" ' + + "./mvnw -s settings.xml -Pci,artifactory " + + "-Ddevelocity.storage.directory=/tmp/jenkins-home/.develocity-root " + + "-Dartifactory.server=${p['artifactory.url']} " + + "-Dartifactory.username=${ARTIFACTORY_USR} " + + "-Dartifactory.password=${ARTIFACTORY_PSW} " + + "-Dartifactory.staging-repository=${p['artifactory.repository.snapshot']} " + + "-Dartifactory.build-name=spring-data-mongodb " + + "-Dartifactory.build-number=spring-data-mongodb-${BRANCH_NAME}-build-${BUILD_NUMBER} " + + "-Dmaven.repo.local=/tmp/jenkins-home/.m2/spring-data-mongodb " + + "-Dmaven.test.skip=true clean deploy -U -B" + } + } + } + } + } + } + + post { + changed { + script { + emailext( + subject: "[${currentBuild.fullDisplayName}] ${currentBuild.currentResult}", + mimeType: 'text/html', + recipientProviders: [[$class: 'CulpritsRecipientProvider'], [$class: 'RequesterRecipientProvider']], + body: "${currentBuild.fullDisplayName} is reported as ${currentBuild.currentResult}") + } + } + } +} diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000000..ff77379631 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/README.adoc b/README.adoc new file mode 100644 index 0000000000..61b956fbfc --- /dev/null +++ b/README.adoc @@ -0,0 +1,231 @@ +image:https://spring.io/badges/spring-data-mongodb/ga.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] image:https://spring.io/badges/spring-data-mongodb/snapshot.svg[Spring Data MongoDB,link=https://spring.io/projects/spring-data-mongodb#quick-start] + += Spring Data MongoDB image:https://jenkins.spring.io/buildStatus/icon?job=spring-data-mongodb%2Fmain&subject=Build[link=https://jenkins.spring.io/view/SpringData/job/spring-data-mongodb/] image:https://img.shields.io/badge/Revved%20up%20by-Develocity-06A0CE?logo=Gradle&labelColor=02303A["Revved up by Develocity", link="https://ge.spring.io/scans?search.rootProjectNames=Spring Data MongoDB"] + +The primary goal of the https://spring.io/projects/spring-data[Spring Data] project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. + +The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. +The Spring Data MongoDB project provides integration with the MongoDB document database. +Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB `+Document+` and easily writing a repository style data access layer. + +[[code-of-conduct]] +== Code of Conduct + +This project is governed by the https://github.com/spring-projects/.github/blob/e3cc2ff230d8f1dca06535aa6b5a4a23815861d4/CODE_OF_CONDUCT.md[Spring Code of Conduct]. By participating, you are expected to uphold this code of conduct. Please report unacceptable behavior to spring-code-of-conduct@pivotal.io. 
+ +[[getting-started]] +== Getting Started + +Here is a quick teaser of an application using Spring Data Repositories in Java: + +[source,java] +---- +public interface PersonRepository extends CrudRepository { + + List findByLastname(String lastname); + + List findByFirstnameLike(String firstname); +} + +@Service +public class MyService { + + private final PersonRepository repository; + + public MyService(PersonRepository repository) { + this.repository = repository; + } + + public void doWork() { + + repository.deleteAll(); + + Person person = new Person(); + person.setFirstname("Oliver"); + person.setLastname("Gierke"); + repository.save(person); + + List lastNameResults = repository.findByLastname("Gierke"); + List firstNameResults = repository.findByFirstnameLike("Oli*"); + } +} + +@Configuration +@EnableMongoRepositories +class ApplicationConfig extends AbstractMongoClientConfiguration { + + @Override + protected String getDatabaseName() { + return "springdata"; + } +} +---- + +[[maven-configuration]] +=== Maven configuration + +Add the Maven dependency: + +[source,xml] +---- + + org.springframework.data + spring-data-mongodb + ${version} + +---- + +If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository +and declare the appropriate dependency version. + +[source,xml] +---- + + org.springframework.data + spring-data-mongodb + ${version}-SNAPSHOT + + + + spring-snapshot + Spring Snapshot Repository + https://repo.spring.io/snapshot + +---- + +[[upgrading]] +== Upgrading + +Instructions for how to upgrade from earlier versions of Spring Data are provided on the project https://github.com/spring-projects/spring-data-commons/wiki[wiki]. +Follow the links in the https://github.com/spring-projects/spring-data-commons/wiki#release-notes[release notes section] to find the version that you want to upgrade to. + +[[getting-help]] +== Getting Help + +Having trouble with Spring Data? We’d love to help! 
+ +* Check the +https://docs.spring.io/spring-data/mongodb/reference/[reference documentation], and https://docs.spring.io/spring-data/mongodb/docs/current/api/[Javadocs] +* Learn the Spring basics – Spring Data builds on Spring Framework, check the https://spring.io[spring.io] web-site for a wealth of reference documentation. +If you are just starting out with Spring, try one of the https://spring.io/guides[guides]. +* If you are upgrading, check out the https://docs.spring.io/spring-data/mongodb/docs/current/changelog.txt[changelog] for "`new and noteworthy`" features. +* Ask a question - we monitor https://stackoverflow.com[stackoverflow.com] for questions tagged with https://stackoverflow.com/tags/spring-data[`spring-data-mongodb`]. +* Report bugs with Spring Data MongoDB at https://github.com/spring-projects/spring-data-mongodb/issues[github.com/spring-projects/spring-data-mongodb/issues]. + +[[reporting-issues]] +== Reporting Issues + +Spring Data uses Github as issue tracking system to record bugs and feature requests. +If you want to raise an issue, please follow the recommendations below: + +* Before you log a bug, please search the https://github.com/spring-projects/spring-data-mongodb/issues[issue tracker] to see if someone has already reported the problem. +* If the issue does not already exist, https://github.com/spring-projects/spring-data-mongodb/issues/new[create a new issue]. +* Please provide as much information as possible with the issue report, we like to know the version of Spring Data that you are using, the JVM version, Stacktrace, etc. +* If you need to paste code, or include a stack trace use https://guides.github.com/features/mastering-markdown/[Markdown] code fences +++```+++. 
+ +[[guides]] +== Guides + +The https://spring.io/[spring.io] site contains several guides that show how to use Spring Data step-by-step: + +* https://spring.io/guides/gs/accessing-data-mongodb/[Accessing Data with MongoDB] is a very basic guide that shows you how to create a simple application and how to access data using repositories. +* https://spring.io/guides/gs/accessing-mongodb-data-rest/[Accessing MongoDB Data with REST] is a guide to creating a REST web service exposing data stored in MongoDB through repositories. + +[[examples]] +== Examples + +* https://github.com/spring-projects/spring-data-examples/[Spring Data Examples] contains example projects that explain specific features in more detail. + +[[building-from-source]] +== Building from Source + +You do not need to build from source to use Spring Data. Binaries are available in https://repo.spring.io[repo.spring.io] +and accessible from Maven using the Maven configuration noted <<maven-configuration,below>>. + +NOTE: Configuration for Gradle is similar to Maven. + +The best way to get started is by creating a Spring Boot project using MongoDB on https://start.spring.io[start.spring.io]. +Follow this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb[link] +to build an imperative application and this https://start.spring.io/#type=maven-project&language=java&platformVersion=3.0.0&packaging=jar&jvmVersion=17&groupId=com.example&artifactId=demo&name=demo&description=Demo%20project%20for%20Spring%20Boot&packageName=com.example.demo&dependencies=data-mongodb-reactive[link] +to build a reactive one. 
+ +However, if you want to try out the latest and greatest, Spring Data MongoDB can be easily built with the https://github.com/takari/maven-wrapper[Maven wrapper] +and minimally, JDK 17 (https://www.oracle.com/java/technologies/downloads/[JDK downloads]). + +In order to build Spring Data MongoDB, you will need to https://www.mongodb.com/try/download/community[download] +and https://docs.mongodb.com/manual/installation/[install a MongoDB distribution]. + +Once you have installed MongoDB, you need to start a MongoDB server. It is convenient to set an environment variable to +your MongoDB installation directory (e.g. `MONGODB_HOME`). + +To run the full test suite, a https://docs.mongodb.com/manual/tutorial/deploy-replica-set/[MongoDB Replica Set] +is required. + +To run the MongoDB server enter the following command from a command-line: + +[source,bash] +---- +$ $MONGODB_HOME/bin/mongod --dbpath $MONGODB_HOME/runtime/data --ipv6 --port 27017 --replSet rs0 +... +"msg":"Successfully connected to host" +---- + +Once the MongoDB server starts up, you should see the message (`msg`), "_Successfully connected to host_". + +Notice the `--dbpath` option to the `mongod` command. You can set this to anything you like, but in this case, we set +the absolute path to a sub-directory (`runtime/data/`) under the MongoDB installation directory (in `$MONGODB_HOME`). + +You need to initialize the MongoDB replica set only once on the first time the MongoDB server is started. +To initialize the replica set, start a mongo client: + +[source,bash] +---- +$ $MONGODB_HOME/bin/mongo +MongoDB server version: 6.0.0 +... +---- + +Then enter the following command: + +[source,bash] +---- +mongo> rs.initiate({ _id: 'rs0', members: [ { _id: 0, host: '127.0.0.1:27017' } ] }) +---- + +Finally, on UNIX-based system (for example, Linux or Mac OS X) you may need to adjust the `ulimit`. 
+In case you need to, you can adjust the `ulimit` with the following command (32768 is just a recommendation): + +[source,bash] +---- +$ ulimit -n 32768 +---- + +You can use `ulimit -a` again to verify the `ulimit` for "_open files_" was set appropriately. + +Now you are ready to build Spring Data MongoDB. Simply enter the following `mvnw` (Maven Wrapper) command: + +[source,bash] +---- + $ ./mvnw clean install +---- + +If you want to build with the regular `mvn` command, you will need https://maven.apache.org/run-maven/index.html[Maven v3.8.0 or above]. + +_Also see link:CONTRIBUTING.adoc[CONTRIBUTING.adoc] if you wish to submit pull requests, and in particular, please sign +the https://cla.pivotal.io/sign/spring[Contributor’s Agreement] before your first non-trivial change._ + +=== Building reference documentation + +Building the documentation builds also the project without running tests. + +[source,bash] +---- + $ ./mvnw clean install -Pantora +---- + +The generated documentation is available from `target/antora/site/index.html`. + +[[license]] +== License + +Spring Data MongoDB is Open Source software released under the https://www.apache.org/licenses/LICENSE-2.0.html[Apache 2.0 license]. diff --git a/README.md b/README.md deleted file mode 100644 index c7b64f93f5..0000000000 --- a/README.md +++ /dev/null @@ -1,186 +0,0 @@ -[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/ga.svg)](http://projects.spring.io/spring-data-mongodb#quick-start) -[![Spring Data MongoDB](https://spring.io/badges/spring-data-mongodb/snapshot.svg)](http://projects.spring.io/spring-data-mongodb#quick-start) - -# Spring Data MongoDB - -The primary goal of the [Spring Data](http://projects.spring.io/spring-data) project is to make it easier to build Spring-powered applications that use new data access technologies such as non-relational databases, map-reduce frameworks, and cloud based data services. 
- -The Spring Data MongoDB project aims to provide a familiar and consistent Spring-based programming model for new datastores while retaining store-specific features and capabilities. The Spring Data MongoDB project provides integration with the MongoDB document database. Key functional areas of Spring Data MongoDB are a POJO centric model for interacting with a MongoDB DBCollection and easily writing a repository style data access layer. - -## Getting Help - -For a comprehensive treatment of all the Spring Data MongoDB features, please refer to: - -* the [User Guide](http://docs.spring.io/spring-data/mongodb/docs/current/reference/html/) -* the [JavaDocs](http://docs.spring.io/spring-data/mongodb/docs/current/api/) have extensive comments in them as well. -* the home page of [Spring Data MongoDB](http://projects.spring.io/spring-data-mongodb) contains links to articles and other resources. -* for more detailed questions, use [Spring Data Mongodb on Stackoverflow](http://stackoverflow.com/questions/tagged/spring-data-mongodb). - -If you are new to Spring as well as to Spring Data, look for information about [Spring projects](http://projects.spring.io/). - - -## Quick Start - -### Maven configuration - -Add the Maven dependency: - -```xml - - org.springframework.data - spring-data-mongodb - ${version}.RELEASE - -``` - -If you'd rather like the latest snapshots of the upcoming major version, use our Maven snapshot repository and declare the appropriate dependency version. - -```xml - - org.springframework.data - spring-data-mongodb - ${version}.BUILD-SNAPSHOT - - - - spring-libs-snapshot - Spring Snapshot Repository - http://repo.spring.io/libs-snapshot - -``` - -### MongoTemplate - -MongoTemplate is the central support class for Mongo database operations. 
It provides: - -* Basic POJO mapping support to and from BSON -* Convenience methods to interact with the store (insert object, update objects) and MongoDB specific ones (geo-spatial operations, upserts, map-reduce etc.) -* Connection affinity callback -* Exception translation into Spring's [technology agnostic DAO exception hierarchy](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/dao.html#dao-exceptions). - -### Spring Data repositories - -To simplify the creation of data repositories Spring Data MongoDB provides a generic repository programming model. It will automatically create a repository proxy for you that adds implementations of finder methods you specify on an interface. - -For example, given a `Person` class with first and last name properties, a `PersonRepository` interface that can query for `Person` by last name and when the first name matches a like expression is shown below: - -```java -public interface PersonRepository extends CrudRepository { - - List findByLastname(String lastname); - - List findByFirstnameLike(String firstname); -} -``` - -The queries issued on execution will be derived from the method name. Extending `CrudRepository` causes CRUD methods being pulled into the interface so that you can easily save and find single entities and collections of them. - -You can have Spring automatically create a proxy for the interface by using the following JavaConfig: - -```java -@Configuration -@EnableMongoRepositories -class ApplicationConfig extends AbstractMongoConfiguration { - - @Override - public MongoClient mongoClient() throws Exception { - return new MongoClient(); - } - - @Override - protected String getDatabaseName() { - return "springdata"; - } -} -``` - -This sets up a connection to a local MongoDB instance and enables the detection of Spring Data repositories (through `@EnableMongoRepositories`). 
The same configuration would look like this in XML: - -```xml - - - - - - - - - - - -``` - -This will find the repository interface and register a proxy object in the container. You can use it as shown below: - -```java -@Service -public class MyService { - - private final PersonRepository repository; - - @Autowired - public MyService(PersonRepository repository) { - this.repository = repository; - } - - public void doWork() { - - repository.deleteAll(); - - Person person = new Person(); - person.setFirstname("Oliver"); - person.setLastname("Gierke"); - person = repository.save(person); - - List lastNameResults = repository.findByLastname("Gierke"); - List firstNameResults = repository.findByFirstnameLike("Oli*"); - } -} -``` - -### MongoDB 4.0 Transactions - -As of version 4 MongoDB supports [Transactions](https://www.mongodb.com/transactions). Transactions are built on top of - `ClientSessions` and therefore require an active session. - -`MongoTransactionManager` is the gateway to the well known Spring transaction support. It allows applications to use -[managed transaction features of Spring](http://docs.spring.io/spring/docs/current/spring-framework-reference/html/transaction.html). -The `MongoTransactionManager` binds a `ClientSession` to the thread. `MongoTemplate` automatically detects those and operates on them accordingly. - -```java -@Configuration -static class Config extends AbstractMongoConfiguration { - - @Bean - MongoTransactionManager transactionManager(MongoDbFactory dbFactory) { - return new MongoTransactionManager(dbFactory); - } - - // ... -} - -@Component -public class StateService { - - @Transactional - void someBusinessFunction(Step step) { - - template.insert(step); - - process(step); - - template.update(Step.class).apply(Update.set("state", // ... 
- }; -}); -``` - -## Contributing to Spring Data - -Here are some ways for you to get involved in the community: - -* Get involved with the Spring community on Stackoverflow and help out on the [spring-data-mongodb](http://stackoverflow.com/questions/tagged/spring-data-mongodb) tag by responding to questions and joining the debate. -* Create [JIRA](https://jira.spring.io/browse/DATAMONGO) tickets for bugs and new features and comment and vote on the ones that you are interested in. -* Github is for social coding: if you want to write code, we encourage contributions through pull requests from [forks of this repository](http://help.github.com/forking/). If you want to contribute code this way, please reference a JIRA ticket as well covering the specific issue you are addressing. -* Watch for upcoming articles on Spring by [subscribing](http://spring.io/blog) to spring.io. - -Before we accept a non-trivial patch or pull request we will need you to [sign the Contributor License Agreement](https://cla.pivotal.io/sign/spring). Signing the contributor’s agreement does not grant anyone commit rights to the main repository, but it does mean that we can accept your contributions, and you will get an author credit if we do. If you forget to do so, you'll be reminded when you submit a pull request. Active contributors might be asked to join the core team, and given the ability to merge pull requests. diff --git a/SECURITY.adoc b/SECURITY.adoc new file mode 100644 index 0000000000..9c518d999a --- /dev/null +++ b/SECURITY.adoc @@ -0,0 +1,9 @@ +# Security Policy + +## Supported Versions + +Please see the https://spring.io/projects/spring-data-mongodb[Spring Data MongoDB] project page for supported versions. + +## Reporting a Vulnerability + +Please don't raise security vulnerabilities here. Head over to https://pivotal.io/security to learn how to disclose them responsibly. 
diff --git a/ci/README.adoc b/ci/README.adoc new file mode 100644 index 0000000000..f1c11d8496 --- /dev/null +++ b/ci/README.adoc @@ -0,0 +1,39 @@ +== Running CI tasks locally + +Since Concourse is built on top of Docker, it's easy to: + +* Debug what went wrong on your local machine. +* Test out a tweak to your `test.sh` script before sending it out. +* Experiment against a new image before submitting your pull request. + +All of these use cases are great reasons to essentially run what Concourse does on your local machine. + +IMPORTANT: To do this you must have Docker installed on your machine. + +1. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github`. ++ +Next, run the `test.sh` script from inside the container: ++ +2. `PROFILE=none spring-data-mongodb-github/ci/test.sh` + +Since the container is binding to your source, you can make edits from your IDE and continue to run build jobs. + +If you need to test the `build.sh` script, do this: + +1. `mkdir /tmp/spring-data-mongodb-artifactory` +2. `docker run -it --mount type=bind,source="$(pwd)",target=/spring-data-mongodb-github --mount type=bind,source="/tmp/spring-data-mongodb-artifactory",target=/spring-data-mongodb-artifactory springci/spring-data-openjdk17-with-mongodb-5.0.3 /bin/bash` ++ +This will launch the Docker image and mount your source code at `spring-data-mongodb-github` and the temporary +artifactory output directory at `spring-data-mongodb-artifactory`. ++ +Next, run the `build.sh` script from inside the container: ++ +3. `spring-data-mongodb-github/ci/build.sh` + +IMPORTANT: `build.sh` doesn't actually push to Artifactory so don't worry about accidentally deploying anything. +It just deploys to a local folder. 
That way, the `artifactory-resource` later in the pipeline can pick up these artifacts +and deliver them to artifactory. + +NOTE: Docker containers can eat up disk space fast! From time to time, run `docker system prune` to clean out old images. diff --git a/ci/openjdk24-mongodb-8.0/Dockerfile b/ci/openjdk24-mongodb-8.0/Dockerfile new file mode 100644 index 0000000000..0cb80001bf --- /dev/null +++ b/ci/openjdk24-mongodb-8.0/Dockerfile @@ -0,0 +1,25 @@ +ARG BASE +FROM ${BASE} +# Any ARG statements before FROM are cleared. +ARG MONGODB + +ENV TZ=Etc/UTC +ENV DEBIAN_FRONTEND=noninteractive +ENV MONGO_VERSION=${MONGODB} + +RUN set -eux; \ + sed -i -e 's/archive.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/security.ubuntu.com/mirror.one.com/g' /etc/apt/sources.list && \ + sed -i -e 's/ports.ubuntu.com/mirrors.ocf.berkeley.edu/g' /etc/apt/sources.list && \ + sed -i -e 's/http/https/g' /etc/apt/sources.list && \ + apt-get update && apt-get install -y apt-transport-https apt-utils gnupg2 wget && \ + # MongoDB 8.0 release signing key + wget -qO - https://www.mongodb.org/static/pgp/server-8.0.asc | apt-key add - && \ + # Needed when MongoDB creates an 8.0 folder. 
+ echo "deb [ arch=amd64,arm64 ] https://repo.mongodb.org/apt/ubuntu noble/mongodb-org/8.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-8.0.list && \ + echo ${TZ} > /etc/timezone + +RUN apt-get update && \ + apt-get install -y mongodb-org=${MONGODB} mongodb-org-server=${MONGODB} mongodb-org-shell=${MONGODB} mongodb-org-mongos=${MONGODB} mongodb-org-tools=${MONGODB} && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* diff --git a/ci/pipeline.properties b/ci/pipeline.properties new file mode 100644 index 0000000000..4beebb0dfe --- /dev/null +++ b/ci/pipeline.properties @@ -0,0 +1,30 @@ +# Java versions +java.main.tag=24.0.1_9-jdk-noble +java.next.tag=24.0.1_9-jdk-noble + +# Docker container images - standard +docker.java.main.image=library/eclipse-temurin:${java.main.tag} +docker.java.next.image=library/eclipse-temurin:${java.next.tag} + +# Supported versions of MongoDB +docker.mongodb.8.0.version=8.0.9 + +# Supported versions of Redis +docker.redis.6.version=6.2.13 +docker.redis.7.version=7.2.4 +docker.valkey.8.version=8.1.1 + +# Docker environment settings +docker.java.inside.basic=-v $HOME:/tmp/jenkins-home +docker.java.inside.docker=-u root -v /var/run/docker.sock:/var/run/docker.sock -v /usr/bin/docker:/usr/bin/docker -v $HOME:/tmp/jenkins-home + +# Credentials +docker.registry= +docker.credentials=hub.docker.com-springbuildmaster +docker.proxy.registry=https://docker-hub.usw1.packages.broadcom.com +docker.proxy.credentials=usw1_packages_broadcom_com-jenkins-token +artifactory.credentials=02bd1690-b54f-4c9f-819d-a77cb7a9822c +artifactory.url=https://repo.spring.io +artifactory.repository.snapshot=libs-snapshot-local +develocity.access-key=gradle_enterprise_secret_access_key +jenkins.user.name=spring-builds+jenkins diff --git a/ci/start-replica.sh b/ci/start-replica.sh new file mode 100755 index 0000000000..9124976f39 --- /dev/null +++ b/ci/start-replica.sh @@ -0,0 +1,6 @@ +#!/bin/sh +mkdir -p /tmp/mongodb/db /tmp/mongodb/log +mongod --setParameter 
transactionLifetimeLimitSeconds=90 --setParameter maxTransactionLockRequestTimeoutMillis=10000 --dbpath /tmp/mongodb/db --replSet rs0 --fork --logpath /tmp/mongodb/log/mongod.log & +sleep 10 +mongosh --eval "rs.initiate({_id: 'rs0', members:[{_id: 0, host: '127.0.0.1:27017'}]});" +sleep 15 diff --git a/mvnw b/mvnw new file mode 100755 index 0000000000..8b9da3b8b6 --- /dev/null +++ b/mvnw @@ -0,0 +1,286 @@ +#!/bin/sh +# ---------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ---------------------------------------------------------------------------- + +# ---------------------------------------------------------------------------- +# Maven2 Start Up Batch script +# +# Required ENV vars: +# ------------------ +# JAVA_HOME - location of a JDK home dir +# +# Optional ENV vars +# ----------------- +# M2_HOME - location of maven2's installed home dir +# MAVEN_OPTS - parameters passed to the Java VM when running Maven +# e.g. 
to debug Maven itself, use +# set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +# MAVEN_SKIP_RC - flag to disable loading of mavenrc files +# ---------------------------------------------------------------------------- + +if [ -z "$MAVEN_SKIP_RC" ] ; then + + if [ -f /etc/mavenrc ] ; then + . /etc/mavenrc + fi + + if [ -f "$HOME/.mavenrc" ] ; then + . "$HOME/.mavenrc" + fi + +fi + +# OS specific support. $var _must_ be set to either true or false. +cygwin=false; +darwin=false; +mingw=false +case "`uname`" in + CYGWIN*) cygwin=true ;; + MINGW*) mingw=true;; + Darwin*) darwin=true + # Use /usr/libexec/java_home if available, otherwise fall back to /Library/Java/Home + # See https://developer.apple.com/library/mac/qa/qa1170/_index.html + if [ -z "$JAVA_HOME" ]; then + if [ -x "/usr/libexec/java_home" ]; then + export JAVA_HOME="`/usr/libexec/java_home`" + else + export JAVA_HOME="/Library/Java/Home" + fi + fi + ;; +esac + +if [ -z "$JAVA_HOME" ] ; then + if [ -r /etc/gentoo-release ] ; then + JAVA_HOME=`java-config --jre-home` + fi +fi + +if [ -z "$M2_HOME" ] ; then + ## resolve links - $0 may be a link to maven's home + PRG="$0" + + # need this for relative symlinks + while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG="`dirname "$PRG"`/$link" + fi + done + + saveddir=`pwd` + + M2_HOME=`dirname "$PRG"`/.. 
+ + # make it fully qualified + M2_HOME=`cd "$M2_HOME" && pwd` + + cd "$saveddir" + # echo Using m2 at $M2_HOME +fi + +# For Cygwin, ensure paths are in UNIX format before anything is touched +if $cygwin ; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --unix "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --unix "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --unix "$CLASSPATH"` +fi + +# For Mingw, ensure paths are in UNIX format before anything is touched +if $mingw ; then + [ -n "$M2_HOME" ] && + M2_HOME="`(cd "$M2_HOME"; pwd)`" + [ -n "$JAVA_HOME" ] && + JAVA_HOME="`(cd "$JAVA_HOME"; pwd)`" + # TODO classpath? +fi + +if [ -z "$JAVA_HOME" ]; then + javaExecutable="`which javac`" + if [ -n "$javaExecutable" ] && ! [ "`expr \"$javaExecutable\" : '\([^ ]*\)'`" = "no" ]; then + # readlink(1) is not available as standard on Solaris 10. + readLink=`which readlink` + if [ ! `expr "$readLink" : '\([^ ]*\)'` = "no" ]; then + if $darwin ; then + javaHome="`dirname \"$javaExecutable\"`" + javaExecutable="`cd \"$javaHome\" && pwd -P`/javac" + else + javaExecutable="`readlink -f \"$javaExecutable\"`" + fi + javaHome="`dirname \"$javaExecutable\"`" + javaHome=`expr "$javaHome" : '\(.*\)/bin'` + JAVA_HOME="$javaHome" + export JAVA_HOME + fi + fi +fi + +if [ -z "$JAVACMD" ] ; then + if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + else + JAVACMD="`which java`" + fi +fi + +if [ ! -x "$JAVACMD" ] ; then + echo "Error: JAVA_HOME is not defined correctly." >&2 + echo " We cannot execute $JAVACMD" >&2 + exit 1 +fi + +if [ -z "$JAVA_HOME" ] ; then + echo "Warning: JAVA_HOME environment variable is not set." 
+fi + +CLASSWORLDS_LAUNCHER=org.codehaus.plexus.classworlds.launcher.Launcher + +# traverses directory structure from process work directory to filesystem root +# first directory with .mvn subdirectory is considered project base directory +find_maven_basedir() { + + if [ -z "$1" ] + then + echo "Path not specified to find_maven_basedir" + return 1 + fi + + basedir="$1" + wdir="$1" + while [ "$wdir" != '/' ] ; do + if [ -d "$wdir"/.mvn ] ; then + basedir=$wdir + break + fi + # workaround for JBEAP-8937 (on Solaris 10/Sparc) + if [ -d "${wdir}" ]; then + wdir=`cd "$wdir/.."; pwd` + fi + # end of workaround + done + echo "${basedir}" +} + +# concatenates all lines of a file +concat_lines() { + if [ -f "$1" ]; then + echo "$(tr -s '\n' ' ' < "$1")" + fi +} + +BASE_DIR=`find_maven_basedir "$(pwd)"` +if [ -z "$BASE_DIR" ]; then + exit 1; +fi + +########################################################################################## +# Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +# This allows using the maven wrapper in projects that prohibit checking in binary data. +########################################################################################## +if [ -r "$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found .mvn/wrapper/maven-wrapper.jar" + fi +else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Couldn't find .mvn/wrapper/maven-wrapper.jar, downloading it ..." + fi + jarUrl="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" + while IFS="=" read key value; do + case "$key" in (wrapperUrl) jarUrl="$value"; break ;; + esac + done < "$BASE_DIR/.mvn/wrapper/maven-wrapper.properties" + if [ "$MVNW_VERBOSE" = true ]; then + echo "Downloading from: $jarUrl" + fi + wrapperJarPath="$BASE_DIR/.mvn/wrapper/maven-wrapper.jar" + + if command -v wget > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found wget ... 
using wget" + fi + wget "$jarUrl" -O "$wrapperJarPath" + elif command -v curl > /dev/null; then + if [ "$MVNW_VERBOSE" = true ]; then + echo "Found curl ... using curl" + fi + curl -o "$wrapperJarPath" "$jarUrl" + else + if [ "$MVNW_VERBOSE" = true ]; then + echo "Falling back to using Java to download" + fi + javaClass="$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.java" + if [ -e "$javaClass" ]; then + if [ ! -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Compiling MavenWrapperDownloader.java ..." + fi + # Compiling the Java class + ("$JAVA_HOME/bin/javac" "$javaClass") + fi + if [ -e "$BASE_DIR/.mvn/wrapper/MavenWrapperDownloader.class" ]; then + # Running the downloader + if [ "$MVNW_VERBOSE" = true ]; then + echo " - Running MavenWrapperDownloader.java ..." + fi + ("$JAVA_HOME/bin/java" -cp .mvn/wrapper MavenWrapperDownloader "$MAVEN_PROJECTBASEDIR") + fi + fi + fi +fi +########################################################################################## +# End of extension +########################################################################################## + +export MAVEN_PROJECTBASEDIR=${MAVEN_BASEDIR:-"$BASE_DIR"} +if [ "$MVNW_VERBOSE" = true ]; then + echo $MAVEN_PROJECTBASEDIR +fi +MAVEN_OPTS="$(concat_lines "$MAVEN_PROJECTBASEDIR/.mvn/jvm.config") $MAVEN_OPTS" + +# For Cygwin, switch paths to Windows format before running java +if $cygwin; then + [ -n "$M2_HOME" ] && + M2_HOME=`cygpath --path --windows "$M2_HOME"` + [ -n "$JAVA_HOME" ] && + JAVA_HOME=`cygpath --path --windows "$JAVA_HOME"` + [ -n "$CLASSPATH" ] && + CLASSPATH=`cygpath --path --windows "$CLASSPATH"` + [ -n "$MAVEN_PROJECTBASEDIR" ] && + MAVEN_PROJECTBASEDIR=`cygpath --path --windows "$MAVEN_PROJECTBASEDIR"` +fi + +WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +exec "$JAVACMD" \ + $MAVEN_OPTS \ + -classpath "$MAVEN_PROJECTBASEDIR/.mvn/wrapper/maven-wrapper.jar" \ + "-Dmaven.home=${M2_HOME}" 
"-Dmaven.multiModuleProjectDirectory=${MAVEN_PROJECTBASEDIR}" \ + ${WRAPPER_LAUNCHER} $MAVEN_CONFIG "$@" diff --git a/mvnw.cmd b/mvnw.cmd new file mode 100755 index 0000000000..fef5a8f7f9 --- /dev/null +++ b/mvnw.cmd @@ -0,0 +1,161 @@ +@REM ---------------------------------------------------------------------------- +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM https://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. +@REM ---------------------------------------------------------------------------- + +@REM ---------------------------------------------------------------------------- +@REM Maven2 Start Up Batch script +@REM +@REM Required ENV vars: +@REM JAVA_HOME - location of a JDK home dir +@REM +@REM Optional ENV vars +@REM M2_HOME - location of maven2's installed home dir +@REM MAVEN_BATCH_ECHO - set to 'on' to enable the echoing of the batch commands +@REM MAVEN_BATCH_PAUSE - set to 'on' to wait for a key stroke before ending +@REM MAVEN_OPTS - parameters passed to the Java VM when running Maven +@REM e.g. 
to debug Maven itself, use +@REM set MAVEN_OPTS=-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 +@REM MAVEN_SKIP_RC - flag to disable loading of mavenrc files +@REM ---------------------------------------------------------------------------- + +@REM Begin all REM lines with '@' in case MAVEN_BATCH_ECHO is 'on' +@echo off +@REM set title of command window +title %0 +@REM enable echoing my setting MAVEN_BATCH_ECHO to 'on' +@if "%MAVEN_BATCH_ECHO%" == "on" echo %MAVEN_BATCH_ECHO% + +@REM set %HOME% to equivalent of $HOME +if "%HOME%" == "" (set "HOME=%HOMEDRIVE%%HOMEPATH%") + +@REM Execute a user defined script before this one +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPre +@REM check for pre script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_pre.bat" call "%HOME%\mavenrc_pre.bat" +if exist "%HOME%\mavenrc_pre.cmd" call "%HOME%\mavenrc_pre.cmd" +:skipRcPre + +@setlocal + +set ERROR_CODE=0 + +@REM To isolate internal variables from possible post scripts, we use another setlocal +@setlocal + +@REM ==== START VALIDATION ==== +if not "%JAVA_HOME%" == "" goto OkJHome + +echo. +echo Error: JAVA_HOME not found in your environment. >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +:OkJHome +if exist "%JAVA_HOME%\bin\java.exe" goto init + +echo. +echo Error: JAVA_HOME is set to an invalid directory. >&2 +echo JAVA_HOME = "%JAVA_HOME%" >&2 +echo Please set the JAVA_HOME variable in your environment to match the >&2 +echo location of your Java installation. >&2 +echo. +goto error + +@REM ==== END VALIDATION ==== + +:init + +@REM Find the project base dir, i.e. the directory that contains the folder ".mvn". +@REM Fallback to current working directory if not found. 
+ +set MAVEN_PROJECTBASEDIR=%MAVEN_BASEDIR% +IF NOT "%MAVEN_PROJECTBASEDIR%"=="" goto endDetectBaseDir + +set EXEC_DIR=%CD% +set WDIR=%EXEC_DIR% +:findBaseDir +IF EXIST "%WDIR%"\.mvn goto baseDirFound +cd .. +IF "%WDIR%"=="%CD%" goto baseDirNotFound +set WDIR=%CD% +goto findBaseDir + +:baseDirFound +set MAVEN_PROJECTBASEDIR=%WDIR% +cd "%EXEC_DIR%" +goto endDetectBaseDir + +:baseDirNotFound +set MAVEN_PROJECTBASEDIR=%EXEC_DIR% +cd "%EXEC_DIR%" + +:endDetectBaseDir + +IF NOT EXIST "%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config" goto endReadAdditionalConfig + +@setlocal EnableExtensions EnableDelayedExpansion +for /F "usebackq delims=" %%a in ("%MAVEN_PROJECTBASEDIR%\.mvn\jvm.config") do set JVM_CONFIG_MAVEN_PROPS=!JVM_CONFIG_MAVEN_PROPS! %%a +@endlocal & set JVM_CONFIG_MAVEN_PROPS=%JVM_CONFIG_MAVEN_PROPS% + +:endReadAdditionalConfig + +SET MAVEN_JAVA_EXE="%JAVA_HOME%\bin\java.exe" +set WRAPPER_JAR="%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.jar" +set WRAPPER_LAUNCHER=org.apache.maven.wrapper.MavenWrapperMain + +set DOWNLOAD_URL="https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.4.2/maven-wrapper-0.4.2.jar" +FOR /F "tokens=1,2 delims==" %%A IN (%MAVEN_PROJECTBASEDIR%\.mvn\wrapper\maven-wrapper.properties) DO ( + IF "%%A"=="wrapperUrl" SET DOWNLOAD_URL=%%B +) + +@REM Extension to allow automatically downloading the maven-wrapper.jar from Maven-central +@REM This allows using the maven wrapper in projects that prohibit checking in binary data. +if exist %WRAPPER_JAR% ( + echo Found %WRAPPER_JAR% +) else ( + echo Couldn't find %WRAPPER_JAR%, downloading it ... 
+ echo Downloading from: %DOWNLOAD_URL% + powershell -Command "(New-Object Net.WebClient).DownloadFile('%DOWNLOAD_URL%', '%WRAPPER_JAR%')" + echo Finished downloading %WRAPPER_JAR% +) +@REM End of extension + +%MAVEN_JAVA_EXE% %JVM_CONFIG_MAVEN_PROPS% %MAVEN_OPTS% %MAVEN_DEBUG_OPTS% -classpath %WRAPPER_JAR% "-Dmaven.multiModuleProjectDirectory=%MAVEN_PROJECTBASEDIR%" %WRAPPER_LAUNCHER% %MAVEN_CONFIG% %* +if ERRORLEVEL 1 goto error +goto end + +:error +set ERROR_CODE=1 + +:end +@endlocal & set ERROR_CODE=%ERROR_CODE% + +if not "%MAVEN_SKIP_RC%" == "" goto skipRcPost +@REM check for post script, once with legacy .bat ending and once with .cmd ending +if exist "%HOME%\mavenrc_post.bat" call "%HOME%\mavenrc_post.bat" +if exist "%HOME%\mavenrc_post.cmd" call "%HOME%\mavenrc_post.cmd" +:skipRcPost + +@REM pause the script if MAVEN_BATCH_PAUSE is set to 'on' +if "%MAVEN_BATCH_PAUSE%" == "on" pause + +if "%MAVEN_TERMINATE_CMD%" == "on" exit %ERROR_CODE% + +exit /B %ERROR_CODE% diff --git a/pom.xml b/pom.xml index 0ff20c5825..95fc8379d9 100644 --- a/pom.xml +++ b/pom.xml @@ -1,35 +1,33 @@ - + 4.0.0 org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 5.0.0-SNAPSHOT pom Spring Data MongoDB MongoDB support for Spring Data - http://projects.spring.io/spring-data-mongodb + https://spring.io/projects/spring-data-mongodb org.springframework.data.build spring-data-parent - 2.1.0.BUILD-SNAPSHOT + 4.0.0-SNAPSHOT spring-data-mongodb - spring-data-mongodb-cross-store spring-data-mongodb-distribution multi spring-data-mongodb - 2.1.0.BUILD-SNAPSHOT - 3.8.0 - 1.9.0 + 4.0.0-SNAPSHOT + 5.5.0 1.19 @@ -39,7 +37,7 @@ Oliver Gierke ogierke at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Project Lead @@ -50,7 +48,7 @@ Thomas Risberg trisberg at vmware.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -61,7 +59,7 @@ Mark Pollack mpollack at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ 
-72,7 +70,7 @@ Jon Brisbin jbrisbin at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -83,7 +81,7 @@ Thomas Darimont tdarimont at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -94,7 +92,7 @@ Christoph Strobl cstrobl at gopivotal.com Pivotal - http://www.gopivotal.com + https://pivotal.io Developer @@ -105,7 +103,7 @@ Mark Paluch mpaluch at pivotal.io Pivotal - http://www.pivotal.io + https://www.pivotal.io Developer @@ -113,59 +111,66 @@ - + + scm:git:https://github.com/spring-projects/spring-data-mongodb.git + scm:git:git@github.com:spring-projects/spring-data-mongodb.git + https://github.com/spring-projects/spring-data-mongodb + + + + GitHub + https://github.com/spring-projects/spring-data-mongodb/issues + + - release - - - - org.jfrog.buildinfo - artifactory-maven-plugin - false - - - + jmh + + + jitpack.io + https://jitpack.io + + - - benchmarks - - spring-data-mongodb - spring-data-mongodb-cross-store - spring-data-mongodb-distribution - spring-data-mongodb-benchmarks - + mongo-4.x + + 4.11.1 + 1.8.0 + - - - - org.mongodb - mongo-java-driver - ${mongo} - - + + + + + org.mongodb + mongodb-driver-bom + ${mongo} + pom + import + + + + - spring-libs-snapshot - https://repo.spring.io/libs-snapshot + spring-snapshot + https://repo.spring.io/snapshot + + true + + + false + + + + spring-milestone + https://repo.spring.io/milestone - - - spring-plugins-release - https://repo.spring.io/plugins-release - - - spring-libs-milestone - https://repo.spring.io/libs-milestone - - - - diff --git a/settings.xml b/settings.xml new file mode 100644 index 0000000000..b3227cc110 --- /dev/null +++ b/settings.xml @@ -0,0 +1,29 @@ + + + + + spring-plugins-release + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-snapshot + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-milestone + ${env.ARTIFACTORY_USR} + ${env.ARTIFACTORY_PSW} + + + spring-libs-release + ${env.ARTIFACTORY_USR} 
+ ${env.ARTIFACTORY_PSW} + + + + \ No newline at end of file diff --git a/spring-data-mongodb-benchmarks/README.md b/spring-data-mongodb-benchmarks/README.md deleted file mode 100644 index e11925b7fd..0000000000 --- a/spring-data-mongodb-benchmarks/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# Benchmarks - -Benchmarks are based on [JMH](http://openjdk.java.net/projects/code-tools/jmh/). - -# Running Benchmarks - -Running benchmarks is disabled by default and can be activated via the `benchmarks` profile. -To run the benchmarks with default settings use. - -```bash -mvn -P benchmarks clean test -``` - -A basic report will be printed to the CLI. - -```bash -# Run complete. Total time: 00:00:15 - -Benchmark Mode Cnt Score Error Units -MappingMongoConverterBenchmark.readObject thrpt 10 1920157,631 ± 64310,809 ops/s -MappingMongoConverterBenchmark.writeObject thrpt 10 782732,857 ± 53804,130 ops/s -``` - -## Running all Benchmarks of a specific class - -To run all Benchmarks of a specific class, just provide its simple class name via the `benchmark` command line argument. - -```bash -mvn -P benchmarks clean test -D benchmark=MappingMongoConverterBenchmark -``` - -## Running a single Benchmark - -To run a single Benchmark provide its containing class simple name followed by `#` and the method name via the `benchmark` command line argument. - -```bash -mvn -P benchmarks clean test -D benchmark=MappingMongoConverterBenchmark#readObjectWith2Properties -``` - -# Saving Benchmark Results - -A detailed benchmark report is stored in JSON format in the `/target/reports/performance` directory. -To store the report in a different location use the `benchmarkReportDir` command line argument. - -## MongoDB - -Results can be directly piped to MongoDB by providing a valid [Connection String](https://docs.mongodb.com/manual/reference/connection-string/) via the `publishTo` command line argument. 
- -```bash -mvn -P benchmarks clean test -D publishTo=mongodb://127.0.0.1:27017 -``` - -NOTE: If the uri does not explicitly define a database the default `spring-data-mongodb-benchmarks` is used. - -## HTTP Endpoint - -The benchmark report can also be posted as `application/json` to an HTTP Endpoint by providing a valid URl via the `publishTo` command line argument. - -```bash -mvn -P benchmarks clean test -D publishTo=http://127.0.0.1:8080/capture-benchmarks -``` - -# Customizing Benchmarks - -Following options can be set via command line. - -Option | Default Value ---- | --- -warmupIterations | 10 -warmupTime | 1 (seconds) -measurementIterations | 10 -measurementTime | 1 (seconds) -forks | 1 -benchmarkReportDir | /target/reports/performance (always relative to project root dir) -benchmark | .* (single benchmark via `classname#benchmark`) -publishTo | \[not set\] (mongodb-uri or http-endpoint) \ No newline at end of file diff --git a/spring-data-mongodb-benchmarks/pom.xml b/spring-data-mongodb-benchmarks/pom.xml deleted file mode 100644 index 9baccaa905..0000000000 --- a/spring-data-mongodb-benchmarks/pom.xml +++ /dev/null @@ -1,111 +0,0 @@ - - - - 4.0.0 - - - org.springframework.data - spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT - ../pom.xml - - - spring-data-mongodb-benchmarks - jar - - Spring Data MongoDB - Microbenchmarks - - - - true - - - - - - ${project.groupId} - spring-data-mongodb - ${project.version} - - - - junit - junit - ${junit} - compile - - - - org.openjdk.jmh - jmh-core - ${jmh.version} - - - - org.openjdk.jmh - jmh-generator-annprocess - ${jmh.version} - provided - - - - - - - - benchmarks - - false - - - - - - - - pl.project13.maven - git-commit-id-plugin - 2.2.2 - - - - revision - - - - - - maven-jar-plugin - - - default-jar - never - - - - - maven-surefire-plugin - - ${project.build.sourceDirectory} - ${project.build.outputDirectory} - - **/AbstractMicrobenchmark.java - **/*$*.class - **/generated/*.class - - - **/*Benchmark* - - - 
${project.build.directory}/reports/performance - ${project.version} - ${git.dirty} - ${git.commit.id} - ${git.branch} - - - - - - diff --git a/spring-data-mongodb-benchmarks/src/main/resources/logback.xml b/spring-data-mongodb-benchmarks/src/main/resources/logback.xml deleted file mode 100644 index bccb2dc4fa..0000000000 --- a/spring-data-mongodb-benchmarks/src/main/resources/logback.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - %d %5p %40.40c:%4L - %m%n - - - - - - - - \ No newline at end of file diff --git a/spring-data-mongodb-cross-store/aop.xml b/spring-data-mongodb-cross-store/aop.xml deleted file mode 100644 index d11b1549e8..0000000000 --- a/spring-data-mongodb-cross-store/aop.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/spring-data-mongodb-cross-store/pom.xml b/spring-data-mongodb-cross-store/pom.xml deleted file mode 100644 index 47a5b7aba7..0000000000 --- a/spring-data-mongodb-cross-store/pom.xml +++ /dev/null @@ -1,148 +0,0 @@ - - - - 4.0.0 - - - org.springframework.data - spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT - ../pom.xml - - - spring-data-mongodb-cross-store - Spring Data MongoDB - Cross-Store Support - - - 2.1.1 - 5.2.1.Final - spring.data.mongodb.cross.store - ${basedir}/.. 
- - - - - - - org.springframework - spring-beans - - - commons-logging - commons-logging - - - - - org.springframework - spring-tx - - - org.springframework - spring-aspects - - - org.springframework - spring-orm - - - - - org.springframework.data - spring-data-mongodb - 2.1.0.BUILD-SNAPSHOT - - - - - io.projectreactor - reactor-core - true - - - - org.aspectj - aspectjrt - ${aspectj} - - - - - org.eclipse.persistence - javax.persistence - ${jpa} - true - - - - - org.hibernate - hibernate-entitymanager - ${hibernate} - test - - - hsqldb - hsqldb - 1.8.0.10 - test - - - javax.validation - validation-api - ${validation} - test - - - org.hibernate - hibernate-validator - 5.2.4.Final - test - - - - - - - - org.codehaus.mojo - aspectj-maven-plugin - 1.6 - - - org.aspectj - aspectjrt - ${aspectj} - - - org.aspectj - aspectjtools - ${aspectj} - - - - - - compile - test-compile - - - - - true - - - org.springframework - spring-aspects - - - ${source.level} - ${source.level} - ${source.level} - aop.xml - - - - - - diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java deleted file mode 100644 index 2b9e07a9fe..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoChangeSetPersister.java +++ /dev/null @@ -1,214 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore; - -import javax.persistence.EntityManagerFactory; - -import org.bson.Document; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.data.crossstore.ChangeSet; -import org.springframework.data.crossstore.ChangeSetBacked; -import org.springframework.data.crossstore.ChangeSetPersister; -import org.springframework.data.mongodb.core.CollectionCallback; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.util.ClassUtils; - -import com.mongodb.MongoException; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.model.Filters; -import com.mongodb.client.result.DeleteResult; - -/** - * @author Thomas Risberg - * @author Oliver Gierke - * @author Alex Vengrovsk - * @author Mark Paluch - * @deprecated will be removed without replacement. 
- */ -@Deprecated -public class MongoChangeSetPersister implements ChangeSetPersister { - - private static final String ENTITY_CLASS = "_entity_class"; - private static final String ENTITY_ID = "_entity_id"; - private static final String ENTITY_FIELD_NAME = "_entity_field_name"; - private static final String ENTITY_FIELD_CLASS = "_entity_field_class"; - - private final Logger log = LoggerFactory.getLogger(getClass()); - - private MongoTemplate mongoTemplate; - private EntityManagerFactory entityManagerFactory; - - public void setMongoTemplate(MongoTemplate mongoTemplate) { - this.mongoTemplate = mongoTemplate; - } - - public void setEntityManagerFactory(EntityManagerFactory entityManagerFactory) { - this.entityManagerFactory = entityManagerFactory; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentState(java.lang.Class, java.lang.Object, org.springframework.data.crossstore.ChangeSet) - */ - public void getPersistentState(Class entityClass, Object id, final ChangeSet changeSet) - throws DataAccessException, NotFoundException { - - if (id == null) { - log.debug("Unable to load MongoDB data for null id"); - return; - } - - String collName = getCollectionNameForEntity(entityClass); - - final Document dbk = new Document(); - dbk.put(ENTITY_ID, id); - dbk.put(ENTITY_CLASS, entityClass.getName()); - if (log.isDebugEnabled()) { - log.debug("Loading MongoDB data for {}", dbk); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - for (Document dbo : collection.find(dbk)) { - String key = (String) dbo.get(ENTITY_FIELD_NAME); - if (log.isDebugEnabled()) { - log.debug("Processing key: {}", key); - } - if (!changeSet.getValues().containsKey(key)) { - String className = (String) dbo.get(ENTITY_FIELD_CLASS); - if (className == null) { - throw new DataIntegrityViolationException( - "Unble to convert property " 
+ key + ": Invalid metadata, " + ENTITY_FIELD_CLASS + " not available"); - } - Class clazz = ClassUtils.resolveClassName(className, ClassUtils.getDefaultClassLoader()); - Object value = mongoTemplate.getConverter().read(clazz, dbo); - if (log.isDebugEnabled()) { - log.debug("Adding to ChangeSet: {}", key); - } - changeSet.set(key, value); - } - } - return null; - } - }); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#getPersistentId(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet) - */ - public Object getPersistentId(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException { - if (log.isDebugEnabled()) { - log.debug("getPersistentId called on {}", entity); - } - if (entityManagerFactory == null) { - throw new DataAccessResourceFailureException("EntityManagerFactory cannot be null"); - } - - return entityManagerFactory.getPersistenceUnitUtil().getIdentifier(entity); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.crossstore.ChangeSetPersister#persistState(org.springframework.data.crossstore.ChangeSetBacked, org.springframework.data.crossstore.ChangeSet) - */ - public Object persistState(ChangeSetBacked entity, ChangeSet cs) throws DataAccessException { - if (cs == null) { - log.debug("Flush: changeset was null, nothing to flush."); - return 0L; - } - - if (log.isDebugEnabled()) { - log.debug("Flush: changeset: {}", cs.getValues()); - } - - String collName = getCollectionNameForEntity(entity.getClass()); - if (mongoTemplate.getCollection(collName) == null) { - mongoTemplate.createCollection(collName); - } - - for (String key : cs.getValues().keySet()) { - if (key != null && !key.startsWith("_") && !key.equals(ChangeSetPersister.ID_KEY)) { - Object value = cs.getValues().get(key); - final Document dbQuery = new Document(); - dbQuery.put(ENTITY_ID, getPersistentId(entity, cs)); - dbQuery.put(ENTITY_CLASS, entity.getClass().getName()); - 
dbQuery.put(ENTITY_FIELD_NAME, key); - final Document dbId = mongoTemplate.execute(collName, new CollectionCallback() { - public Document doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { - Document id = collection.find(dbQuery).first(); - return id; - } - }); - - if (value == null) { - if (log.isDebugEnabled()) { - log.debug("Flush: removing: {}", dbQuery); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { - DeleteResult dr = collection.deleteMany(dbQuery); - return null; - } - }); - } else { - final Document dbDoc = new Document(); - dbDoc.putAll(dbQuery); - if (log.isDebugEnabled()) { - log.debug("Flush: saving: {}", dbQuery); - } - mongoTemplate.getConverter().write(value, dbDoc); - dbDoc.put(ENTITY_FIELD_CLASS, value.getClass().getName()); - if (dbId != null) { - dbDoc.put("_id", dbId.get("_id")); - } - mongoTemplate.execute(collName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { - - if (dbId != null) { - collection.replaceOne(Filters.eq("_id", dbId.get("_id")), dbDoc); - } else { - - if (dbDoc.containsKey("_id") && dbDoc.get("_id") == null) { - dbDoc.remove("_id"); - } - collection.insertOne(dbDoc); - } - return null; - } - }); - } - } - } - return 0L; - } - - /** - * Returns the collection the given entity type shall be persisted to. - * - * @param entityClass must not be {@literal null}. 
- * @return - */ - private String getCollectionNameForEntity(Class entityClass) { - return mongoTemplate.getCollectionName(entityClass); - } -} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj deleted file mode 100644 index a032194c4b..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/MongoDocumentBacking.aj +++ /dev/null @@ -1,272 +0,0 @@ -/* - * Copyright 2011-2017 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore; - -import java.lang.reflect.Field; - -import javax.persistence.EntityManager; -import javax.persistence.Transient; -import javax.persistence.Entity; - -import org.aspectj.lang.JoinPoint; -import org.aspectj.lang.reflect.FieldSignature; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.crossstore.RelatedDocument; -import org.springframework.data.mongodb.crossstore.DocumentBacked; -import org.springframework.data.crossstore.ChangeSetBackedTransactionSynchronization; -import org.springframework.data.crossstore.ChangeSet; -import org.springframework.data.crossstore.ChangeSetPersister; -import org.springframework.data.crossstore.ChangeSetPersister.NotFoundException; -import org.springframework.data.crossstore.HashMapChangeSet; -import org.springframework.transaction.support.TransactionSynchronizationManager; - -/** - * Aspect to turn an object annotated with @Document into a persistent document using Mongo. - * - * @author Thomas Risberg - * @deprecated will be removed without replacement. 
- */ -@Deprecated -public aspect MongoDocumentBacking { - - private static final Logger LOGGER = LoggerFactory.getLogger(MongoDocumentBacking.class); - - // Aspect shared config - private ChangeSetPersister changeSetPersister; - - public void setChangeSetPersister(ChangeSetPersister changeSetPersister) { - this.changeSetPersister = changeSetPersister; - } - - // ITD to introduce N state to Annotated objects - declare parents : (@Entity *) implements DocumentBacked; - - // The annotated fields that will be persisted in MongoDB rather than with JPA - declare @field: @RelatedDocument * (@Entity+ *).*:@Transient; - - // ------------------------------------------------------------------------- - // Advise user-defined constructors of ChangeSetBacked objects to create a new - // backing ChangeSet - // ------------------------------------------------------------------------- - pointcut arbitraryUserConstructorOfChangeSetBackedObject(DocumentBacked entity) : - execution((DocumentBacked+).new(..)) && - !execution((DocumentBacked+).new(ChangeSet)) && - this(entity); - - pointcut finderConstructorOfChangeSetBackedObject(DocumentBacked entity, ChangeSet cs) : - execution((DocumentBacked+).new(ChangeSet)) && - this(entity) && - args(cs); - - protected pointcut entityFieldGet(DocumentBacked entity) : - get(@RelatedDocument * DocumentBacked+.*) && - this(entity) && - !get(* DocumentBacked.*); - - protected pointcut entityFieldSet(DocumentBacked entity, Object newVal) : - set(@RelatedDocument * DocumentBacked+.*) && - this(entity) && - args(newVal) && - !set(* DocumentBacked.*); - - // intercept EntityManager.merge calls - public pointcut entityManagerMerge(EntityManager em, Object entity) : - call(* EntityManager.merge(Object)) && - target(em) && - args(entity); - - // intercept EntityManager.remove calls - // public pointcut entityManagerRemove(EntityManager em, Object entity) : - // call(* EntityManager.remove(Object)) && - // target(em) && - // args(entity); - - // move 
changeSet from detached entity to the newly merged persistent object - Object around(EntityManager em, Object entity) : entityManagerMerge(em, entity) { - Object mergedEntity = proceed(em, entity); - if (entity instanceof DocumentBacked && mergedEntity instanceof DocumentBacked) { - ((DocumentBacked) mergedEntity).changeSet = ((DocumentBacked) entity).getChangeSet(); - } - return mergedEntity; - } - - // clear changeSet from removed entity - // Object around(EntityManager em, Object entity) : entityManagerRemove(em, entity) { - // if (entity instanceof DocumentBacked) { - // removeChangeSetValues((DocumentBacked)entity); - // } - // return proceed(em, entity); - // } - - private static void removeChangeSetValues(DocumentBacked entity) { - LOGGER.debug("Removing all change-set values for " + entity); - ChangeSet nulledCs = new HashMapChangeSet(); - DocumentBacked documentEntity = (DocumentBacked) entity; - @SuppressWarnings("unchecked") - ChangeSetPersister changeSetPersister = (ChangeSetPersister) documentEntity.itdChangeSetPersister; - try { - changeSetPersister.getPersistentState(documentEntity.getClass(), documentEntity.get_persistent_id(), - documentEntity.getChangeSet()); - } catch (DataAccessException e) { - } catch (NotFoundException e) { - } - for (String key : entity.getChangeSet().getValues().keySet()) { - nulledCs.set(key, null); - } - entity.setChangeSet(nulledCs); - } - - before(DocumentBacked entity) : arbitraryUserConstructorOfChangeSetBackedObject(entity) { - LOGGER.debug("User-defined constructor called on DocumentBacked object of class " + entity.getClass()); - // Populate all ITD fields - entity.setChangeSet(new HashMapChangeSet()); - entity.itdChangeSetPersister = changeSetPersister; - entity.itdTransactionSynchronization = new ChangeSetBackedTransactionSynchronization(changeSetPersister, entity); - // registerTransactionSynchronization(entity); - } - - private static void registerTransactionSynchronization(DocumentBacked entity) { - if 
(TransactionSynchronizationManager.isSynchronizationActive()) { - if (!TransactionSynchronizationManager.getSynchronizations().contains(entity.itdTransactionSynchronization)) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Adding transaction synchronization for " + entity); - } - TransactionSynchronizationManager.registerSynchronization(entity.itdTransactionSynchronization); - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Transaction synchronization already active for " + entity); - } - } - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Transaction synchronization is not active for " + entity); - } - } - } - - // ------------------------------------------------------------------------- - // ChangeSet-related mixins - // ------------------------------------------------------------------------- - // Introduced field - @Transient - private ChangeSet DocumentBacked.changeSet; - - @Transient - private ChangeSetPersister DocumentBacked.itdChangeSetPersister; - - @Transient - private ChangeSetBackedTransactionSynchronization DocumentBacked.itdTransactionSynchronization; - - public void DocumentBacked.setChangeSet(ChangeSet cs) { - this.changeSet = cs; - } - - public ChangeSet DocumentBacked.getChangeSet() { - return changeSet; - } - - // Flush the entity state to the persistent store - public void DocumentBacked.flush() { - Object id = itdChangeSetPersister.getPersistentId(this, this.changeSet); - itdChangeSetPersister.persistState(this, this.changeSet); - } - - public Object DocumentBacked.get_persistent_id() { - return itdChangeSetPersister.getPersistentId(this, this.changeSet); - } - - // lifecycle methods - @javax.persistence.PostPersist - public void DocumentBacked.itdPostPersist() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PrePersist: " + this.getClass().getName()); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PreUpdate - public void DocumentBacked.itdPreUpdate() { - if 
(LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PreUpdate: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PostUpdate - public void DocumentBacked.itdPostUpdate() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostUpdate: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - @javax.persistence.PostRemove - public void DocumentBacked.itdPostRemove() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostRemove: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - removeChangeSetValues(this); - } - - @javax.persistence.PostLoad - public void DocumentBacked.itdPostLoad() { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("JPA lifecycle event PostLoad: " + this.getClass().getName() + " :: " + this); - } - registerTransactionSynchronization(this); - } - - /** - * delegates field reads to the state accessors instance - */ - Object around(DocumentBacked entity): entityFieldGet(entity) { - Field f = field(thisJoinPoint); - String propName = f.getName(); - LOGGER.trace("GET " + f + " -> ChangeSet value property [" + propName + "] using: " + entity.getChangeSet()); - if (entity.getChangeSet().getValues().get(propName) == null) { - try { - this.changeSetPersister - .getPersistentState(entity.getClass(), entity.get_persistent_id(), entity.getChangeSet()); - } catch (NotFoundException e) { - } - } - Object fValue = entity.getChangeSet().getValues().get(propName); - if (fValue != null) { - return fValue; - } - return proceed(entity); - } - - /** - * delegates field writes to the state accessors instance - */ - Object around(DocumentBacked entity, Object newVal) : entityFieldSet(entity, newVal) { - Field f = field(thisJoinPoint); - String propName = f.getName(); - LOGGER.trace("SET " + f + " -> ChangeSet number value property [" + propName + "] with 
value=[" + newVal + "]"); - entity.getChangeSet().set(propName, newVal); - return proceed(entity, newVal); - } - - Field field(JoinPoint joinPoint) { - FieldSignature fieldSignature = (FieldSignature) joinPoint.getSignature(); - return fieldSignature.getField(); - } -} diff --git a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java b/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java deleted file mode 100644 index 7209091339..0000000000 --- a/spring-data-mongodb-cross-store/src/main/java/org/springframework/data/mongodb/crossstore/package-info.java +++ /dev/null @@ -1,5 +0,0 @@ -/** - * Infrastructure for Spring Data's MongoDB cross store support. - */ -package org.springframework.data.mongodb.crossstore; - diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java deleted file mode 100644 index 25aad5c2fb..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/CrossStoreMongoTests.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore; - -import javax.persistence.EntityManager; -import javax.persistence.PersistenceContext; - -import org.bson.Document; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.crossstore.test.Address; -import org.springframework.data.mongodb.crossstore.test.Person; -import org.springframework.data.mongodb.crossstore.test.Resume; -import org.springframework.test.context.ContextConfiguration; -import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; -import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionStatus; -import org.springframework.transaction.annotation.Transactional; -import org.springframework.transaction.support.TransactionCallback; -import org.springframework.transaction.support.TransactionTemplate; - -/** - * Integration tests for MongoDB cross-store persistence (mainly {@link MongoChangeSetPersister}). - * - * @author Thomas Risberg - * @author Oliver Gierke - */ -@RunWith(SpringJUnit4ClassRunner.class) -@ContextConfiguration("classpath:/META-INF/spring/applicationContext.xml") -public class CrossStoreMongoTests { - - @Autowired MongoTemplate mongoTemplate; - - @PersistenceContext EntityManager entityManager; - - @Autowired PlatformTransactionManager transactionManager; - TransactionTemplate txTemplate; - - @Before - public void setUp() { - - txTemplate = new TransactionTemplate(transactionManager); - - clearData(Person.class); - - Address address = new Address(12, "MAin St.", "Boston", "MA", "02101"); - - Resume resume = new Resume(); - resume.addEducation("Skanstulls High School, 1975"); - resume.addEducation("Univ. 
of Stockholm, 1980"); - resume.addJob("DiMark, DBA, 1990-2000"); - resume.addJob("VMware, Developer, 2007-"); - - final Person person = new Person("Thomas", 20); - person.setAddress(address); - person.setResume(resume); - person.setId(1L); - - txTemplate.execute(new TransactionCallback() { - public Void doInTransaction(TransactionStatus status) { - entityManager.persist(person); - return null; - } - }); - } - - @After - public void tearDown() { - txTemplate.execute(new TransactionCallback() { - public Void doInTransaction(TransactionStatus status) { - entityManager.remove(entityManager.find(Person.class, 1L)); - return null; - } - }); - } - - private void clearData(Class domainType) { - - String collectionName = mongoTemplate.getCollectionName(domainType); - mongoTemplate.dropCollection(collectionName); - } - - @Test - @Transactional - public void testReadJpaToMongoEntityRelationship() { - - Person found = entityManager.find(Person.class, 1L); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found.getResume()); - Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-", found.getResume().getJobs()); - } - - @Test - @Transactional - public void testUpdatedJpaToMongoEntityRelationship() { - - Person found = entityManager.find(Person.class, 1L); - found.setAge(44); - found.getResume().addJob("SpringDeveloper.com, Consultant, 2005-2006"); - - entityManager.merge(found); - - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found); - Assert.assertEquals(Long.valueOf(1), found.getId()); - Assert.assertNotNull(found.getResume()); - Assert.assertEquals("DiMark, DBA, 1990-2000" + "; " + "VMware, Developer, 2007-" + "; " - + "SpringDeveloper.com, Consultant, 2005-2006", found.getResume().getJobs()); - } - - @Test - public void 
testMergeJpaEntityWithMongoDocument() { - - final Person detached = entityManager.find(Person.class, 1L); - entityManager.detach(detached); - detached.getResume().addJob("TargetRx, Developer, 2000-2005"); - - Person merged = txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - Person result = entityManager.merge(detached); - entityManager.flush(); - return result; - } - }); - - Assert.assertTrue(detached.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - Assert.assertTrue(merged.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - final Person updated = entityManager.find(Person.class, 1L); - Assert.assertTrue(updated.getResume().getJobs().contains("TargetRx, Developer, 2000-2005")); - } - - @Test - public void testRemoveJpaEntityWithMongoDocument() { - - txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - Person p2 = new Person("Thomas", 20); - Resume r2 = new Resume(); - r2.addEducation("Skanstulls High School, 1975"); - r2.addJob("DiMark, DBA, 1990-2000"); - p2.setResume(r2); - p2.setId(2L); - entityManager.persist(p2); - Person p3 = new Person("Thomas", 20); - Resume r3 = new Resume(); - r3.addEducation("Univ. 
of Stockholm, 1980"); - r3.addJob("VMware, Developer, 2007-"); - p3.setResume(r3); - p3.setId(3L); - entityManager.persist(p3); - return null; - } - }); - txTemplate.execute(new TransactionCallback() { - public Person doInTransaction(TransactionStatus status) { - final Person found2 = entityManager.find(Person.class, 2L); - entityManager.remove(found2); - return null; - } - }); - - boolean weFound3 = false; - - for (Document dbo : this.mongoTemplate.getCollection(mongoTemplate.getCollectionName(Person.class)).find()) { - Assert.assertTrue(!dbo.get("_entity_id").equals(2L)); - if (dbo.get("_entity_id").equals(3L)) { - weFound3 = true; - } - } - Assert.assertTrue(weFound3); - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java deleted file mode 100644 index b6db0fd8eb..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Address.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.crossstore.test; - -public class Address { - - private Integer streetNumber; - private String streetName; - private String city; - private String state; - private String zip; - - public Address(Integer streetNumber, String streetName, String city, String state, String zip) { - super(); - this.streetNumber = streetNumber; - this.streetName = streetName; - this.city = city; - this.state = state; - this.zip = zip; - } - - public Integer getStreetNumber() { - return streetNumber; - } - - public void setStreetNumber(Integer streetNumber) { - this.streetNumber = streetNumber; - } - - public String getStreetName() { - return streetName; - } - - public void setStreetName(String streetName) { - this.streetName = streetName; - } - - public String getCity() { - return city; - } - - public void setCity(String city) { - this.city = city; - } - - public String getState() { - return state; - } - - public void setState(String state) { - this.state = state; - } - - public String getZip() { - return zip; - } - - public void setZip(String zip) { - this.zip = zip; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java deleted file mode 100644 index be1e15ea38..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Person.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore.test; - -import javax.persistence.Entity; -import javax.persistence.Id; - -import org.springframework.data.mongodb.crossstore.RelatedDocument; - -@Entity -public class Person { - - @Id - Long id; - - private String name; - - private int age; - - private java.util.Date birthDate; - - @RelatedDocument - private Address address; - - @RelatedDocument - private Resume resume; - - public Person() { - } - - public Person(String name, int age) { - this.name = name; - this.age = age; - this.birthDate = new java.util.Date(); - } - - public void birthday() { - ++age; - } - - public Long getId() { - return id; - } - - public void setId(Long id) { - this.id = id; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public int getAge() { - return age; - } - - public void setAge(int age) { - this.age = age; - } - - public java.util.Date getBirthDate() { - return birthDate; - } - - public void setBirthDate(java.util.Date birthDate) { - this.birthDate = birthDate; - } - - public Resume getResume() { - return resume; - } - - public void setResume(Resume resume) { - this.resume = resume; - } - - public Address getAddress() { - return address; - } - - public void setAddress(Address address) { - this.address = address; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java 
b/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java deleted file mode 100644 index 71a01ad8ee..0000000000 --- a/spring-data-mongodb-cross-store/src/test/java/org/springframework/data/mongodb/crossstore/test/Resume.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.crossstore.test; - -import org.apache.commons.logging.Log; -import org.apache.commons.logging.LogFactory; -import org.bson.types.ObjectId; -import org.springframework.data.annotation.Id; -import org.springframework.data.mongodb.core.mapping.Document; - -@Document -public class Resume { - - private static final Log LOGGER = LogFactory.getLog(Resume.class); - - @Id - private ObjectId id; - - private String education = ""; - - private String jobs = ""; - - public String getId() { - return id.toString(); - } - - public String getEducation() { - return education; - } - - public void addEducation(String education) { - LOGGER.debug("Adding education " + education); - this.education = this.education + (this.education.length() > 0 ? "; " : "") + education; - } - - public String getJobs() { - return jobs; - } - - public void addJob(String job) { - LOGGER.debug("Adding job " + job); - this.jobs = this.jobs + (this.jobs.length() > 0 ? 
"; " : "") + job; - } - - @Override - public String toString() { - return "Resume [education=" + education + ", jobs=" + jobs + "]"; - } - -} diff --git a/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml b/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml deleted file mode 100644 index 878fff47ba..0000000000 --- a/spring-data-mongodb-cross-store/src/test/resources/META-INF/persistence.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - org.hibernate.ejb.HibernatePersistence - org.springframework.data.mongodb.crossstore.test.Person - - - - - - - - diff --git a/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml b/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml deleted file mode 100644 index 3fad886b03..0000000000 --- a/spring-data-mongodb-cross-store/src/test/resources/META-INF/spring/applicationContext.xml +++ /dev/null @@ -1,72 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/spring-data-mongodb-cross-store/src/test/resources/logback.xml b/spring-data-mongodb-cross-store/src/test/resources/logback.xml deleted file mode 100644 index 5ecc71909e..0000000000 --- a/spring-data-mongodb-cross-store/src/test/resources/logback.xml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - %d %5p %40.40c:%4L - %m%n - - - - - - - - - - \ No newline at end of file diff --git a/spring-data-mongodb-distribution/package.json b/spring-data-mongodb-distribution/package.json new file mode 100644 index 0000000000..4689506b3f --- /dev/null +++ b/spring-data-mongodb-distribution/package.json @@ -0,0 +1,10 @@ +{ + "dependencies": { + "antora": "3.2.0-alpha.6", + "@antora/atlas-extension": "1.0.0-alpha.2", + "@antora/collector-extension": "1.0.0-alpha.7", + "@asciidoctor/tabs": "1.0.0-beta.6", + "@springio/antora-extensions": "1.13.0", + "@springio/asciidoctor-extensions": "1.0.0-alpha.11" 
+ } +} diff --git a/spring-data-mongodb-distribution/pom.xml b/spring-data-mongodb-distribution/pom.xml index e5c865ea08..fc88571622 100644 --- a/spring-data-mongodb-distribution/pom.xml +++ b/spring-data-mongodb-distribution/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -13,30 +15,62 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 5.0.0-SNAPSHOT ../pom.xml ${basedir}/.. - SDMONGO + ${project.basedir}/../src/main/antora/antora-playbook.yml + + + ${project.basedir}/../src/main/antora/resources/antora-resources + true + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.4.0 + + + timestamp-property + + timestamp-property + + validate + + current.year + yyyy + + + + org.apache.maven.plugins - maven-assembly-plugin + maven-resources-plugin + + + + resources + + + - org.codehaus.mojo - wagon-maven-plugin + org.apache.maven.plugins + maven-assembly-plugin - org.asciidoctor - asciidoctor-maven-plugin + org.antora + antora-maven-plugin + diff --git a/spring-data-mongodb/pom.xml b/spring-data-mongodb/pom.xml index b86dc2808c..6f34da5660 100644 --- a/spring-data-mongodb/pom.xml +++ b/spring-data-mongodb/pom.xml @@ -1,5 +1,7 @@ - + 4.0.0 @@ -11,7 +13,7 @@ org.springframework.data spring-data-mongodb-parent - 2.1.0.BUILD-SNAPSHOT + 5.0.0-SNAPSHOT ../pom.xml @@ -25,6 +27,30 @@ + + + org.mongodb + mongodb-driver-core + + + + org.mongodb + mongodb-driver-sync + true + + + + org.mongodb + mongodb-driver-reactivestreams + true + + + + org.mongodb + mongodb-crypt + true + + org.springframework @@ -41,12 +67,6 @@ org.springframework spring-core - - - commons-logging - commons-logging - - org.springframework @@ -65,6 +85,12 @@ querydsl-mongodb ${querydsl} true + + + org.mongodb + mongo-java-driver + + @@ -81,33 +107,15 @@ true - - org.mongodb - mongodb-driver-reactivestreams - ${mongo.reactivestreams} + com.google.code.findbugs + jsr305 + 3.0.2 true - org.mongodb - mongodb-driver-async - ${mongo} - true - - - org.mongodb - mongodb-driver-core - - - 
org.mongodb - bson - - - - - - io.projectreactor + io.projectreactor reactor-core true @@ -119,35 +127,28 @@ - io.reactivex - rxjava - ${rxjava} - true - - - - io.reactivex - rxjava-reactive-streams - ${rxjava-reactive-streams} - true + org.awaitility + awaitility + ${awaitility} + test - io.reactivex.rxjava2 + io.reactivex.rxjava3 rxjava - ${rxjava2} + ${rxjava3} true - - - org.apache.geronimo.specs - geronimo-jcdi_2.0_spec - 1.0.1 + net.javacrumbs.json-unit + json-unit-assertj + 4.1.0 test + + javax.interceptor javax.interceptor-api @@ -156,17 +157,17 @@ - javax.enterprise - cdi-api + jakarta.enterprise + jakarta.enterprise.cdi-api ${cdi} provided true - javax.annotation - javax.annotation-api - ${javax-annotation-api} + jakarta.annotation + jakarta.annotation-api + ${jakarta-annotation-api} test @@ -179,8 +180,8 @@ - javax.validation - validation-api + jakarta.validation + jakarta.validation-api ${validation} true @@ -193,37 +194,44 @@ - org.hibernate - hibernate-validator - 5.2.4.Final - test + io.micrometer + micrometer-observation + true - joda-time - joda-time - ${jodatime} + io.micrometer + micrometer-tracing + true + + + + org.hibernate.validator + hibernate-validator + 7.0.1.Final test - org.threeten - threetenbp - ${threetenbp} + jakarta.el + jakarta.el-api + 4.0.0 + provided true - com.fasterxml.jackson.core - jackson-databind + org.glassfish + jakarta.el + 4.0.2 + provided true - org.slf4j - jul-to-slf4j - ${slf4j} - test + com.fasterxml.jackson.core + jackson-databind + true @@ -254,9 +262,42 @@ - javax.transaction - jta - 1.1 + org.junit-pioneer + junit-pioneer + 0.5.3 + test + + + + org.junit.platform + junit-platform-launcher + test + + + + org.testcontainers + junit-jupiter + ${testcontainers} + test + + + + org.testcontainers + mongodb + ${testcontainers} + test + + + + jakarta.transaction + jakarta.transaction-api + 2.0.0 + test + + + + org.springframework + spring-core-test test @@ -264,48 +305,153 @@ org.jetbrains.kotlin kotlin-stdlib - 
${kotlin} true + org.jetbrains.kotlin kotlin-reflect - ${kotlin} true + - org.jetbrains.kotlin - kotlin-test - ${kotlin} + org.jetbrains.kotlinx + kotlinx-coroutines-core + true + + + + org.jetbrains.kotlinx + kotlinx-coroutines-reactor + true + + + + io.mockk + mockk-jvm + ${mockk} test + - com.nhaarman - mockito-kotlin - 1.5.0 + io.micrometer + micrometer-test test - org.jetbrains.kotlin - kotlin-stdlib - - - org.jetbrains.kotlin - kotlin-reflect - - - org.mockito - mockito-core + com.github.tomakehurst + wiremock-jre8-standalone + + io.micrometer + micrometer-tracing-test + test + + + + io.micrometer + micrometer-tracing-integration-test + test + + + + + org.jmolecules + jmolecules-ddd + ${jmolecules} + test + - + + + nullaway + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + com.querydsl + querydsl-apt + ${querydsl} + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh} + + + com.google.errorprone + error_prone_core + ${errorprone} + + + com.uber.nullaway + nullaway + ${nullaway} + + + + + + default-compile + none + + + default-testCompile + none + + + java-compile + compile + + compile + + + + -XDcompilePolicy=simple + --should-stop=ifError=FLOW + -Xplugin:ErrorProne -XepDisableAllChecks -Xep:NullAway:ERROR -XepOpt:NullAway:OnlyNullMarked=true -XepOpt:NullAway:TreatGeneratedAsUnannotated=true -XepOpt:NullAway:CustomContractAnnotations=org.springframework.lang.Contract + + + + + java-test-compile + test-compile + + testCompile + + + + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + + org.openjdk.jmh + jmh-generator-annprocess + ${jmh} + + + + + com.mysema.maven apt-maven-plugin @@ -324,8 +470,11 @@ test-process - target/generated-test-sources - org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + target/generated-test-sources + + + org.springframework.data.mongodb.repository.support.MongoAnnotationProcessor + @@ -335,6 +484,7 @@ org.apache.maven.plugins maven-surefire-plugin + false false 
**/*Tests.java @@ -344,15 +494,13 @@ **/ReactivePerformanceTests.java - src/test/resources/logging.properties + ${mongo} + ${env.MONGO_VERSION} + + src/test/resources/logging.properties + true - - - listener - org.springframework.data.mongodb.test.util.CleanMongoDBJunitRunListener - - diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java similarity index 94% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java index da457264c0..3b0c72cc0b 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/ProjectionsBenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.beans.factory.annotation.Value; import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery; @@ -27,8 +28,8 @@ import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; import com.mongodb.client.MongoCollection; /** @@ -56,7 +57,7 @@ public class ProjectionsBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); template = new MongoTemplate(client, DB_NAME); source = new Person(); @@ -83,7 +84,7 @@ public void setUp() { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java similarity index 77% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java index b10eb6b4ad..53f64f2a50 100644 --- 
a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/DbRefMappingBenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,8 +18,6 @@ import static org.springframework.data.mongodb.core.query.Criteria.*; import static org.springframework.data.mongodb.core.query.Query.*; -import lombok.Data; - import java.util.ArrayList; import java.util.List; @@ -29,14 +27,15 @@ import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.data.annotation.Id; import org.springframework.data.mongodb.core.MongoTemplate; import org.springframework.data.mongodb.core.mapping.DBRef; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; /** * @author Christoph Strobl @@ -55,7 +54,7 @@ public class DbRefMappingBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() throws Exception { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); template = new MongoTemplate(client, DB_NAME); List refObjects = new ArrayList<>(); @@ -80,7 +79,7 @@ public void setUp() throws Exception { 
@TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } @@ -94,18 +93,56 @@ public ObjectWithDBRef readMultipleDbRefs() { return template.findOne(queryObjectWithDBRefList, ObjectWithDBRef.class); } - @Data static class ObjectWithDBRef { private @Id ObjectId id; private @DBRef RefObject ref; private @DBRef List refList; + + public ObjectId getId() { + return id; + } + + public void setId(ObjectId id) { + this.id = id; + } + + public RefObject getRef() { + return ref; + } + + public void setRef(RefObject ref) { + this.ref = ref; + } + + public List getRefList() { + return refList; + } + + public void setRefList(List refList) { + this.refList = refList; + } } - @Data static class RefObject { private @Id String id; private String someValue; + + public String getId() { + return id; + } + + public void setId(String id) { + this.id = id; + } + + public String getSomeValue() { + return someValue; + } + + public void setSomeValue(String someValue) { + this.someValue = someValue; + } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java similarity index 74% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java index 3d6cd34c43..00d2e7034a 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/core/convert/MappingMongoConverterBenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. 
+ * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,6 @@ */ package org.springframework.data.mongodb.core.convert; -import lombok.AllArgsConstructor; -import lombok.Data; -import lombok.Getter; -import lombok.RequiredArgsConstructor; - import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; @@ -29,25 +24,29 @@ import org.bson.Document; import org.bson.types.ObjectId; +import org.junit.platform.commons.annotation.Testable; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import org.openjdk.jmh.annotations.TearDown; + import org.springframework.data.annotation.Id; import org.springframework.data.geo.Point; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; +import org.springframework.util.ObjectUtils; -import com.mongodb.MongoClient; -import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; /** * @author Christoph Strobl */ @State(Scope.Benchmark) +@Testable public class MappingMongoConverterBenchmark extends AbstractMicrobenchmark { private static final String DB_NAME = "mapping-mongo-converter-benchmark"; @@ -64,13 +63,13 @@ 
public class MappingMongoConverterBenchmark extends AbstractMicrobenchmark { @Setup public void setUp() throws Exception { - client = new MongoClient(new ServerAddress()); + client = MongoClients.create(); this.mappingContext = new MongoMappingContext(); this.mappingContext.setInitialEntitySet(Collections.singleton(Customer.class)); this.mappingContext.afterPropertiesSet(); - DbRefResolver dbRefResolver = new DefaultDbRefResolver(new SimpleMongoDbFactory(client, DB_NAME)); + DbRefResolver dbRefResolver = new DefaultDbRefResolver(new SimpleMongoClientDatabaseFactory(client, DB_NAME)); this.converter = new MappingMongoConverter(dbRefResolver, mappingContext); this.converter.setCustomConversions(new MongoCustomConversions(Collections.emptyList())); @@ -116,7 +115,7 @@ public void setUp() throws Exception { @TearDown public void tearDown() { - client.dropDatabase(DB_NAME); + client.getDatabase(DB_NAME).drop(); client.close(); } @@ -151,22 +150,36 @@ public Object writeObjectWithListAndMapsOfComplexType() { return sink; } - @Getter - @RequiredArgsConstructor static class Customer { private @Id ObjectId id; private final String firstname, lastname; private final Address address; + + public Customer(String firstname, String lastname, Address address) { + this.firstname = firstname; + this.lastname = lastname; + this.address = address; + } } - @Getter - @AllArgsConstructor static class Address { private String zipCode, city; + + public Address(String zipCode, String city) { + this.zipCode = zipCode; + this.city = city; + } + + public String getZipCode() { + return zipCode; + } + + public String getCity() { + return city; + } } - @Data static class SlightlyMoreComplexObject { @Id String id; @@ -177,5 +190,59 @@ static class SlightlyMoreComplexObject { Customer customer; List
addressList; Map customerMap; + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (!(o instanceof SlightlyMoreComplexObject)) { + return false; + } + SlightlyMoreComplexObject that = (SlightlyMoreComplexObject) o; + if (intOne != that.intOne) { + return false; + } + if (intTwo != that.intTwo) { + return false; + } + if (!ObjectUtils.nullSafeEquals(id, that.id)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringOne, that.stringOne)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(stringTwo, that.stringTwo)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(renamedField, that.renamedField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(location, that.location)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(customer, that.customer)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(addressList, that.addressList)) { + return false; + } + return ObjectUtils.nullSafeEquals(customerMap, that.customerMap); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(id); + result = 31 * result + intOne; + result = 31 * result + intTwo; + result = 31 * result + ObjectUtils.nullSafeHashCode(stringOne); + result = 31 * result + ObjectUtils.nullSafeHashCode(stringTwo); + result = 31 * result + ObjectUtils.nullSafeHashCode(renamedField); + result = 31 * result + ObjectUtils.nullSafeHashCode(location); + result = 31 * result + ObjectUtils.nullSafeHashCode(customer); + result = 31 * result + ObjectUtils.nullSafeHashCode(addressList); + result = 31 * result + ObjectUtils.nullSafeHashCode(customerMap); + return result; + } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java similarity index 97% rename from 
spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java index f10e03c5de..615500904d 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/AbstractMicrobenchmark.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,6 @@ import java.util.Collection; import java.util.Date; -import org.junit.Test; import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Measurement; import org.openjdk.jmh.annotations.Scope; @@ -33,6 +32,7 @@ import org.openjdk.jmh.runner.options.ChainedOptionsBuilder; import org.openjdk.jmh.runner.options.OptionsBuilder; import org.openjdk.jmh.runner.options.TimeValue; + import org.springframework.core.env.StandardEnvironment; import org.springframework.util.CollectionUtils; import org.springframework.util.ResourceUtils; @@ -41,8 +41,8 @@ /** * @author Christoph Strobl */ -@Warmup(iterations = AbstractMicrobenchmark.WARMUP_ITERATIONS) -@Measurement(iterations = AbstractMicrobenchmark.MEASUREMENT_ITERATIONS) +@Warmup(iterations = AbstractMicrobenchmark.WARMUP_ITERATIONS, time = 2) +@Measurement(iterations = AbstractMicrobenchmark.MEASUREMENT_ITERATIONS, time = 2) 
@Fork(AbstractMicrobenchmark.FORKS) @State(Scope.Thread) public class AbstractMicrobenchmark { @@ -62,7 +62,6 @@ public class AbstractMicrobenchmark { * @throws Exception * @see #options(String) */ - @Test public void run() throws Exception { String includes = includes(); @@ -322,7 +321,7 @@ private void publishResults(Collection results) { try { ResultsWriter.forUri(uri).write(results); } catch (Exception e) { - System.err.println(String.format("Cannot save benchmark results to '%s'. Error was %s.", uri, e)); + System.err.println(String.format("Cannot save benchmark results to '%s'; Error was %s", uri, e)); } } } diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java similarity index 89% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java index 2eec70441a..af56908755 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/HttpResultsWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.microbenchmark; -import lombok.SneakyThrows; +import java.io.IOException; import java.io.OutputStream; import java.net.HttpURLConnection; import java.net.URL; @@ -43,13 +43,20 @@ class HttpResultsWriter implements ResultsWriter { } @Override - @SneakyThrows public void write(Collection results) { if (CollectionUtils.isEmpty(results)) { return; } + try { + doWrite(results); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + private void doWrite(Collection results) throws IOException { StandardEnvironment env = new StandardEnvironment(); String projectVersion = env.getProperty("project.version", "unknown"); diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java similarity index 84% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java index d7166863f8..2114d2a06a 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/MongoResultsWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,15 +21,16 @@ import org.bson.Document; import org.openjdk.jmh.results.RunResult; + import org.springframework.core.env.StandardEnvironment; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import com.mongodb.BasicDBObject; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; +import com.mongodb.ConnectionString; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; import com.mongodb.client.MongoDatabase; -import com.mongodb.util.JSON; /** * MongoDB specific {@link ResultsWriter} implementation. @@ -56,13 +57,14 @@ public void write(Collection results) { String gitDirty = env.getProperty("git.dirty", "no"); String gitCommitId = env.getProperty("git.commit.id", "unknown"); - MongoClientURI uri = new MongoClientURI(this.uri); - MongoClient client = new MongoClient(uri); + ConnectionString connectionString = new ConnectionString(this.uri); + MongoClient client = MongoClients.create(this.uri); - String dbName = StringUtils.hasText(uri.getDatabase()) ? uri.getDatabase() : "spring-data-mongodb-benchmarks"; + String dbName = StringUtils.hasText(connectionString.getDatabase()) ? 
connectionString.getDatabase() + : "spring-data-mongodb-benchmarks"; MongoDatabase db = client.getDatabase(dbName); - for (BasicDBObject dbo : (List) JSON.parse(ResultsWriter.jsonifyResults(results))) { + for (Document dbo : (List) Document.parse(ResultsWriter.jsonifyResults(results))) { String collectionName = extractClass(dbo.get("benchmark").toString()); diff --git a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java similarity index 87% rename from spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java rename to spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java index 73b4d04b44..95da1750bc 100644 --- a/spring-data-mongodb-benchmarks/src/main/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/microbenchmark/ResultsWriter.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,6 @@ */ package org.springframework.data.mongodb.microbenchmark; -import lombok.SneakyThrows; - import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.nio.charset.StandardCharsets; @@ -54,13 +52,12 @@ static ResultsWriter forUri(String uri) { * * @param results * @return json string representation of results. 
- * @see org.openjdk.jmh.results.format.JSONResultFormat */ - @SneakyThrows static String jsonifyResults(Collection results) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); - ResultFormatFactory.getInstance(ResultFormatType.JSON, new PrintStream(baos, true, "UTF-8")).writeOut(results); + ResultFormatFactory.getInstance(ResultFormatType.JSON, new PrintStream(baos, true, StandardCharsets.UTF_8)) + .writeOut(results); return new String(baos.toByteArray(), StandardCharsets.UTF_8); } diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/AotRepositoryBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/AotRepositoryBenchmark.java new file mode 100644 index 0000000000..ba9da66da4 --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/AotRepositoryBenchmark.java @@ -0,0 +1,153 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import org.junit.platform.commons.annotation.Testable; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; + +import org.springframework.aot.test.generate.TestGenerationContext; +import org.springframework.core.test.tools.TestCompiler; +import org.springframework.data.mongodb.core.MongoOperations; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; +import org.springframework.data.mongodb.repository.aot.MongoRepositoryContributor; +import org.springframework.data.mongodb.repository.aot.TestMongoAotRepositoryContext; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory; +import org.springframework.data.mongodb.repository.support.QuerydslMongoPredicateExecutor; +import org.springframework.data.mongodb.repository.support.SimpleMongoRepository; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.repository.core.RepositoryMetadata; +import org.springframework.data.repository.core.support.RepositoryComposition; +import org.springframework.data.repository.core.support.RepositoryFactoryBeanSupport; +import org.springframework.data.repository.core.support.RepositoryFragment; +import org.springframework.data.repository.query.ValueExpressionDelegate; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Benchmark for AOT repositories. 
+ * + * @author Mark Paluch + */ +@Testable +public class AotRepositoryBenchmark extends AbstractMicrobenchmark { + + @State(Scope.Benchmark) + public static class BenchmarkParameters { + + public static Class aot; + public static TestMongoAotRepositoryContext repositoryContext = new TestMongoAotRepositoryContext( + SmallerPersonRepository.class, + RepositoryComposition.of(RepositoryFragment.structural(SimpleMongoRepository.class), + RepositoryFragment.structural(QuerydslMongoPredicateExecutor.class))); + + MongoClient mongoClient; + MongoTemplate mongoTemplate; + RepositoryComposition.RepositoryFragments fragments; + SmallerPersonRepository repositoryProxy; + + @Setup(Level.Trial) + public void doSetup() { + + mongoClient = MongoClients.create(); + mongoTemplate = new MongoTemplate(mongoClient, "jmh"); + + if (this.aot == null) { + + TestGenerationContext generationContext = new TestGenerationContext(PersonRepository.class); + + new MongoRepositoryContributor(repositoryContext).contribute(generationContext); + + TestCompiler.forSystem().withCompilerOptions("-parameters").with(generationContext).compile(compiled -> { + + try { + this.aot = compiled.getClassLoader().loadClass(SmallerPersonRepository.class.getName() + "Impl__Aot"); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + try { + RepositoryFactoryBeanSupport.FragmentCreationContext creationContext = getCreationContext(repositoryContext); + fragments = RepositoryComposition.RepositoryFragments + .just(aot.getConstructor(MongoOperations.class, RepositoryFactoryBeanSupport.FragmentCreationContext.class) + .newInstance(mongoTemplate, creationContext)); + + this.repositoryProxy = createRepository(fragments); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + private RepositoryFactoryBeanSupport.FragmentCreationContext getCreationContext( + TestMongoAotRepositoryContext repositoryContext) { + + RepositoryFactoryBeanSupport.FragmentCreationContext creationContext = new 
RepositoryFactoryBeanSupport.FragmentCreationContext() { + @Override + public RepositoryMetadata getRepositoryMetadata() { + return repositoryContext.getRepositoryInformation(); + } + + @Override + public ValueExpressionDelegate getValueExpressionDelegate() { + return ValueExpressionDelegate.create(); + } + + @Override + public ProjectionFactory getProjectionFactory() { + return new SpelAwareProxyProjectionFactory(); + } + }; + + return creationContext; + } + + @TearDown(Level.Trial) + public void doTearDown() { + mongoClient.close(); + } + + public SmallerPersonRepository createRepository(RepositoryComposition.RepositoryFragments fragments) { + MongoRepositoryFactory repositoryFactory = new MongoRepositoryFactory(mongoTemplate); + return repositoryFactory.getRepository(SmallerPersonRepository.class, fragments); + } + + } + + @Benchmark + public SmallerPersonRepository repositoryBootstrap(BenchmarkParameters parameters) { + return parameters.createRepository(parameters.fragments); + } + + @Benchmark + public Object findDerived(BenchmarkParameters parameters) { + return parameters.repositoryProxy.findByFirstname("foo"); + } + + @Benchmark + public Object findAnnotated(BenchmarkParameters parameters) { + return parameters.repositoryProxy.findByThePersonsFirstname("foo"); + } + +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/SmallerPersonRepository.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/SmallerPersonRepository.java new file mode 100644 index 0000000000..bc3868e052 --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/SmallerPersonRepository.java @@ -0,0 +1,477 @@ +/* + * Copyright 2010-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.repository; + +import java.util.Collection; +import java.util.Date; +import java.util.List; +import java.util.Optional; +import java.util.UUID; +import java.util.stream.Stream; + +import org.jspecify.annotations.Nullable; + +import org.springframework.data.domain.Limit; +import org.springframework.data.domain.Page; +import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Slice; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.repository.Person.Sex; +import org.springframework.data.querydsl.QuerydslPredicateExecutor; +import org.springframework.data.repository.query.Param; + +/** + * Sample repository managing {@link Person} entities. + * + * @author Oliver Gierke + * @author Thomas Darimont + * @author Christoph Strobl + * @author Fırat KÜÇÜK + * @author Mark Paluch + */ +public interface SmallerPersonRepository extends MongoRepository, QuerydslPredicateExecutor { + + /** + * Returns all {@link Person}s with the given lastname. 
+ * + * @param lastname + * @return + */ + List findByLastname(String lastname); + + List findByLastnameStartsWith(String prefix); + + List findByLastnameEndsWith(String postfix); + + /** + * Returns all {@link Person}s with the given lastname ordered by their firstname. + * + * @param lastname + * @return + */ + List findByLastnameOrderByFirstnameAsc(String lastname); + + /** + * Returns the {@link Person}s with the given lastname. Uses {@link Query} annotation to define the query to be + * executed. + * + * @param lastname + * @return + */ + @Query(value = "{ 'lastname' : ?0 }", fields = "{ 'firstname': 1, 'lastname': 1}") + List findByThePersonsLastname(String lastname); + + /** + * Returns the {@link Person}s with the given firstname. Uses {@link Query} annotation to define the query to be + * executed. + * + * @param firstname + * @return + */ + @Query(value = "{ 'firstname' : ?0 }", fields = "{ 'firstname': 1, 'lastname': 1}") + List findByThePersonsFirstname(String firstname); + + // DATAMONGO-871 + @Query(value = "{ 'firstname' : ?0 }") + Person[] findByThePersonsFirstnameAsArray(String firstname); + + /** + * Returns all {@link Person}s with a firstname matching the given one (*-wildcard supported). + * + * @param firstname + * @return + */ + List findByFirstnameLike(@Nullable String firstname); + + List findByFirstnameNotContains(String firstname); + + /** + * Returns all {@link Person}s with a firstname not matching the given one (*-wildcard supported). + * + * @param firstname + * @return + */ + List findByFirstnameNotLike(String firstname); + + List findByFirstnameLikeOrderByLastnameAsc(String firstname, Sort sort); + + List findBySkillsContains(List skills); + + List findBySkillsNotContains(List skills); + + @Query("{'age' : { '$lt' : ?0 } }") + List findByAgeLessThan(int age, Sort sort); + + /** + * Returns a scroll of {@link Person}s with a lastname matching the given one (*-wildcards supported). 
+ * + * @param lastname + * @param scrollPosition + * @return + */ + Window findTop2ByLastnameLikeOrderByLastnameAscFirstnameAsc(String lastname, ScrollPosition scrollPosition); + + Window findByLastnameLikeOrderByLastnameAscFirstnameAsc(String lastname, ScrollPosition scrollPosition, + Limit limit); + + /** + * Returns a scroll of {@link Person}s applying projections with a lastname matching the given one (*-wildcards + * supported). + * + * @param lastname + * @param pageable + * @return + */ + Window findCursorProjectionByLastnameLike(String lastname, Pageable pageable); + + /** + * Returns a page of {@link Person}s with a lastname matching the given one (*-wildcards supported). + * + * @param lastname + * @param pageable + * @return + */ + Page findByLastnameLike(String lastname, Pageable pageable); + + List findByLastnameLike(String lastname, Sort sort, Limit limit); + + @Query("{ 'lastname' : { '$regex' : '?0', '$options' : 'i'}}") + Page findByLastnameLikeWithPageable(String lastname, Pageable pageable); + + List findByFirstname(String firstname); + + List findByLastnameIgnoreCaseIn(String... lastname); + + /** + * Returns all {@link Person}s with a firstname contained in the given varargs. + * + * @param firstnames + * @return + */ + List findByFirstnameIn(String... firstnames); + + /** + * Returns all {@link Person}s with a firstname not contained in the given collection. + * + * @param firstnames + * @return + */ + List findByFirstnameNotIn(Collection firstnames); + + List findByFirstnameAndLastname(String firstname, String lastname); + + /** + * Returns all {@link Person}s with an age between the two given values. + * + * @param from + * @param to + * @return + */ + List findByAgeBetween(int from, int to); + + /** + * Returns the {@link Person} with the given {@link Address} as shipping address. 
+ * + * @param address + * @return + */ + Person findByShippingAddresses(Address address); + + /** + * Returns all {@link Person}s with the given {@link Address}. + * + * @param address + * @return + */ + List findByAddress(Address address); + + List findByAddressZipCode(String zipCode); + + List findByLastnameLikeAndAgeBetween(String lastname, int from, int to); + + List findByAgeOrLastnameLikeAndFirstnameLike(int age, String lastname, String firstname); + + // TODO: List findByLocationNear(Point point); + + // TODO: List findByLocationWithin(Circle circle); + + // TODO: List findByLocationWithin(Box box); + + // TODO: List findByLocationWithin(Polygon polygon); + + List findBySex(Sex sex); + + List findBySex(Sex sex, Pageable pageable); + + // TODO: List findByNamedQuery(String firstname); + + List findByCreator(User user); + + // DATAMONGO-425 + List findByCreatedAtLessThan(Date date); + + // DATAMONGO-425 + List findByCreatedAtGreaterThan(Date date); + + // DATAMONGO-425 + @Query("{ 'createdAt' : { '$lt' : ?0 }}") + List findByCreatedAtLessThanManually(Date date); + + // DATAMONGO-427 + List findByCreatedAtBefore(Date date); + + // DATAMONGO-427 + List findByCreatedAtAfter(Date date); + + // DATAMONGO-472 + List findByLastnameNot(String lastname); + + // DATAMONGO-600 + List findByCredentials(Credentials credentials); + + // DATAMONGO-636 + long countByLastname(String lastname); + + // DATAMONGO-636 + int countByFirstname(String firstname); + + // DATAMONGO-636 + @Query(value = "{ 'lastname' : ?0 }", count = true) + long someCountQuery(String lastname); + + // DATAMONGO-1454 + boolean existsByFirstname(String firstname); + + // DATAMONGO-1454 + @ExistsQuery(value = "{ 'lastname' : ?0 }") + boolean someExistQuery(String lastname); + + // DATAMONGO-770 + List findByFirstnameIgnoreCase(@Nullable String firstName); + + // DATAMONGO-770 + List findByFirstnameNotIgnoreCase(String firstName); + + // DATAMONGO-770 + List findByFirstnameStartingWithIgnoreCase(String 
firstName); + + // DATAMONGO-770 + List findByFirstnameEndingWithIgnoreCase(String firstName); + + // DATAMONGO-770 + List findByFirstnameContainingIgnoreCase(String firstName); + + // DATAMONGO-870 + Slice findByAgeGreaterThan(int age, Pageable pageable); + + // DATAMONGO-821 + @Query("{ creator : { $exists : true } }") + Page findByHavingCreator(Pageable page); + + // DATAMONGO-566 + List deleteByLastname(String lastname); + + // DATAMONGO-566 + Long deletePersonByLastname(String lastname); + + // DATAMONGO-1997 + Optional deleteOptionalByLastname(String lastname); + + // DATAMONGO-566 + @Query(value = "{ 'lastname' : ?0 }", delete = true) + List removeByLastnameUsingAnnotatedQuery(String lastname); + + // DATAMONGO-566 + @Query(value = "{ 'lastname' : ?0 }", delete = true) + Long removePersonByLastnameUsingAnnotatedQuery(String lastname); + + // DATAMONGO-893 + Page findByAddressIn(List
address, Pageable page); + + // DATAMONGO-745 + @Query("{firstname:{$in:?0}, lastname:?1}") + Page findByCustomQueryFirstnamesAndLastname(List firstnames, String lastname, Pageable page); + + // DATAMONGO-745 + @Query("{lastname:?0, 'address.street':{$in:?1}}") + Page findByCustomQueryLastnameAndAddressStreetInList(String lastname, List streetNames, + Pageable page); + + // DATAMONGO-950 + List findTop3ByLastnameStartingWith(String lastname); + + // DATAMONGO-950 + Page findTop3ByLastnameStartingWith(String lastname, Pageable pageRequest); + + // DATAMONGO-1865 + Person findFirstBy(); // limits to 1 result if more, just return the first one + + // DATAMONGO-1865 + Person findPersonByLastnameLike(String firstname); // single person, error if more than one + + // DATAMONGO-1865 + Optional findOptionalPersonByLastnameLike(String firstname); // optional still, error when more than one + + // DATAMONGO-1030 + PersonSummaryDto findSummaryByLastname(String lastname); + + PersonSummaryWithOptional findSummaryWithOptionalByLastname(String lastname); + + @Query("{ ?0 : ?1 }") + List findByKeyValue(String key, String value); + + // DATAMONGO-1165 + @Query("{ firstname : { $in : ?0 }}") + Stream findByCustomQueryWithStreamingCursorByFirstnames(List firstnames); + + // DATAMONGO-990 + @Query("{ firstname : ?#{[0]}}") + List findWithSpelByFirstnameForSpELExpressionWithParameterIndexOnly(String firstname); + + // DATAMONGO-990 + @Query("{ firstname : ?#{[0]}, email: ?#{principal.email} }") + List findWithSpelByFirstnameAndCurrentUserWithCustomQuery(String firstname); + + // DATAMONGO-990 + @Query("{ firstname : :#{#firstname}}") + List findWithSpelByFirstnameForSpELExpressionWithParameterVariableOnly(@Param("firstname") String firstname); + + // DATAMONGO-1911 + @Query("{ uniqueId: ?0}") + Person findByUniqueId(UUID uniqueId); + + /** + * Returns the count of {@link Person} with the given firstname. Uses {@link CountQuery} annotation to define the + * query to be executed. 
+ * + * @param firstname + * @return + */ + @CountQuery("{ 'firstname' : ?0 }") // DATAMONGO-1539 + long countByThePersonsFirstname(String firstname); + + /** + * Deletes {@link Person} entities with the given firstname. Uses {@link DeleteQuery} annotation to define the query + * to be executed. + * + * @param firstname + */ + @DeleteQuery("{ 'firstname' : ?0 }") // DATAMONGO-1539 + void deleteByThePersonsFirstname(String firstname); + + // DATAMONGO-1752 + Iterable findOpenProjectionBy(); + + // DATAMONGO-1752 + Iterable findClosedProjectionBy(); + + @Query(sort = "{ age : -1 }") + List findByAgeGreaterThan(int age); + + @Query(sort = "{ age : -1 }") + List findByAgeGreaterThan(int age, Sort sort); + + // TODO: List findByFirstnameRegex(Pattern pattern); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'fans': { '$slice': [ ?1, ?2 ] } }") + Person findWithSliceInProjection(String id, int skip, int limit); + + @Query(value = "{ 'id' : ?0 }", fields = "{ 'firstname': { '$toUpper': '$firstname' } }") + Person findWithAggregationInProjection(String id); + + @Query(value = "{ 'shippingAddresses' : { '$elemMatch' : { 'city' : { '$eq' : 'lnz' } } } }", + fields = "{ 'shippingAddresses.$': ?0 }") + Person findWithArrayPositionInProjection(int position); + + @Query(value = "{ 'fans' : { '$elemMatch' : { '$ref' : 'user' } } }", fields = "{ 'fans.$': ?0 }") + Person findWithArrayPositionInProjectionWithDbRef(int position); + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + List findAllLastnames(); + + @Aggregation("{ '$project': { '_id' : '$lastname' } }") + Stream findAllLastnamesAsStream(); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Stream groupStreamByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + Slice 
groupByLastnameAndAsSlice(String property, Pageable pageable); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property, Sort sort); + + @Aggregation("{ '$group': { '_id' : '$lastname', names : { $addToSet : '$?0' } } }") + List groupByLastnameAnd(String property, Pageable page); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + int sumAge(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + AggregationResults sumAgeAndReturnAggregationResultWrapper(); + + @Aggregation(pipeline = "{ '$group' : { '_id' : null, 'total' : { $sum: '$age' } } }") + AggregationResults sumAgeAndReturnAggregationResultWrapperWithConcreteType(); + + @Aggregation({ "{ '$match' : { 'lastname' : 'Matthews'} }", + "{ '$project': { _id : 0, firstname : 1, lastname : 1 } }" }) + Iterable findAggregatedClosedInterfaceProjectionBy(); + + @Query(value = "{_id:?0}") + Optional findDocumentById(String id); + + @Query(value = "{ 'firstname' : ?0, 'lastname' : ?1, 'email' : ?2 , 'age' : ?3, 'sex' : ?4, " + + "'createdAt' : ?5, 'skills' : ?6, 'address.street' : ?7, 'address.zipCode' : ?8, " // + + "'address.city' : ?9, 'uniqueId' : ?10, 'credentials.username' : ?11, 'credentials.password' : ?12 }") + Person findPersonByManyArguments(String firstname, String lastname, String email, Integer age, Sex sex, + Date createdAt, List skills, String street, String zipCode, // + String city, UUID uniqueId, String username, String password); + + List findByUnwrappedUserUsername(String username); + + List findByUnwrappedUser(User user); + + int findAndUpdateViaMethodArgAllByLastname(String lastname, UpdateDefinition update); + + @Update("{ '$inc' : { 'visits' : ?1 } }") + int findAndIncrementVisitsByLastname(String lastname, int increment); + + @Query("{ 'lastname' : ?0 }") + @Update("{ '$inc' : { 'visits' : ?1 } }") + int updateAllByLastname(String lastname, 
int increment); + + @Update(pipeline = { "{ '$set' : { 'visits' : { '$add' : [ '$visits', ?1 ] } } }" }) + void findAndIncrementVisitsViaPipelineByLastname(String lastname, int increment); + + @Update("{ '$inc' : { 'visits' : ?#{[1]} } }") + int findAndIncrementVisitsUsingSpELByLastname(String lastname, int increment); + + @Update("{ '$push' : { 'shippingAddresses' : ?1 } }") + int findAndPushShippingAddressByEmail(String email, Address address); + + @Query("{ 'age' : null }") + Person findByQueryWithNullEqualityCheck(); + + List findBySpiritAnimal(User user); + +} diff --git a/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/SmallerRepositoryBenchmark.java b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/SmallerRepositoryBenchmark.java new file mode 100644 index 0000000000..f461a22d31 --- /dev/null +++ b/spring-data-mongodb/src/jmh/java/org/springframework/data/mongodb/repository/SmallerRepositoryBenchmark.java @@ -0,0 +1,83 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.repository; + +import org.junit.platform.commons.annotation.Testable; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; + +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.microbenchmark.AbstractMicrobenchmark; +import org.springframework.data.mongodb.repository.support.MongoRepositoryFactory; + +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; + +/** + * Benchmark for AOT repositories. + * + * @author Mark Paluch + */ +@Testable +public class SmallerRepositoryBenchmark extends AbstractMicrobenchmark { + + @State(Scope.Benchmark) + public static class BenchmarkParameters { + + MongoClient mongoClient; + MongoTemplate mongoTemplate; + SmallerPersonRepository repositoryProxy; + + @Setup(Level.Trial) + public void doSetup() { + + mongoClient = MongoClients.create(); + mongoTemplate = new MongoTemplate(mongoClient, "jmh"); + repositoryProxy = createRepository(); + } + + @TearDown(Level.Trial) + public void doTearDown() { + mongoClient.close(); + } + + public SmallerPersonRepository createRepository() { + MongoRepositoryFactory repositoryFactory = new MongoRepositoryFactory(mongoTemplate); + return repositoryFactory.getRepository(SmallerPersonRepository.class); + } + + } + + @Benchmark + public SmallerPersonRepository repositoryBootstrap(BenchmarkParameters parameters) { + return parameters.createRepository(); + } + + @Benchmark + public Object findDerived(BenchmarkParameters parameters) { + return parameters.repositoryProxy.findByFirstname("foo"); + } + + @Benchmark + public Object findAnnotated(BenchmarkParameters parameters) { + return parameters.repositoryProxy.findByThePersonsFirstname("foo"); + } + +} diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java new file mode 100644 index 0000000000..3ae41aad35 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BindableMongoExpression.java @@ -0,0 +1,151 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Arrays; + +import org.bson.Document; +import org.bson.codecs.DocumentCodec; +import org.bson.codecs.configuration.CodecRegistry; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.util.json.ParameterBindingDocumentCodec; +import org.springframework.data.util.Lazy; +import org.springframework.lang.Contract; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * A {@link MongoExpression} using the {@link ParameterBindingDocumentCodec} for parsing a raw ({@literal json}) + * expression. The expression will be wrapped within { ... } if necessary. The actual parsing and parameter + * binding of placeholders like {@code ?0} is delayed upon first call on the target {@link Document} via + * {@link #toDocument()}.
+ * + *
+ * $toUpper : $name                -> { '$toUpper' : '$name' }
+ *
+ * { '$toUpper' : '$name' }        -> { '$toUpper' : '$name' }
+ *
+ * { '$toUpper' : '?0' }, "$name"  -> { '$toUpper' : '$name' }
+ * 
+ * + * Some types might require a special {@link org.bson.codecs.Codec}. If so, make sure to provide a {@link CodecRegistry} + * containing the required {@link org.bson.codecs.Codec codec} via {@link #withCodecRegistry(CodecRegistry)}. + * + * @author Christoph Strobl + * @author Giacomo Baso + * @since 3.2 + */ +public class BindableMongoExpression implements MongoExpression { + + private final String expressionString; + + private final @Nullable CodecRegistryProvider codecRegistryProvider; + + private final Object @Nullable [] args; + + private final Lazy target; + + /** + * Create a new instance of {@link BindableMongoExpression}. + * + * @param expression must not be {@literal null}. + * @param args must not be {@literal null} but may contain {@literal null} elements. + */ + public BindableMongoExpression(String expression, Object @Nullable [] args) { + this(expression, null, args); + } + + /** + * Create a new instance of {@link BindableMongoExpression}. + * + * @param expression must not be {@literal null}. + * @param codecRegistryProvider can be {@literal null}. + * @param args must not be {@literal null} but may contain {@literal null} elements. + */ + public BindableMongoExpression(String expression, @Nullable CodecRegistryProvider codecRegistryProvider, + Object @Nullable [] args) { + + Assert.notNull(expression, "Expression must not be null"); + + this.expressionString = expression; + this.codecRegistryProvider = codecRegistryProvider; + this.args = args; + this.target = Lazy.of(this::parse); + } + + /** + * Provide the {@link CodecRegistry} used to convert expressions. + * + * @param codecRegistry must not be {@literal null}. + * @return new instance of {@link BindableMongoExpression}. 
+ */ + @Contract("_ -> new") + public BindableMongoExpression withCodecRegistry(CodecRegistry codecRegistry) { + return new BindableMongoExpression(expressionString, () -> codecRegistry, args); + } + + /** + * Provide the arguments to bind to the placeholders via their index. + * + * @param args must not be {@literal null}. + * @return new instance of {@link BindableMongoExpression}. + */ + @Contract("_ -> new") + public BindableMongoExpression bind(Object... args) { + return new BindableMongoExpression(expressionString, codecRegistryProvider, args); + } + + @Override + public Document toDocument() { + return target.get(); + } + + @Override + public String toString() { + return "BindableMongoExpression{" + "expressionString='" + expressionString + '\'' + ", args=" + + Arrays.toString(args) + '}'; + } + + private Document parse() { + + String expression = wrapJsonIfNecessary(expressionString); + + if (ObjectUtils.isEmpty(args)) { + + if (codecRegistryProvider == null) { + return Document.parse(expression); + } + + return Document.parse(expression, codecRegistryProvider.getCodecFor(Document.class) + .orElseGet(() -> new DocumentCodec(codecRegistryProvider.getCodecRegistry()))); + } + + ParameterBindingDocumentCodec codec = codecRegistryProvider == null ? new ParameterBindingDocumentCodec() + : new ParameterBindingDocumentCodec(codecRegistryProvider.getCodecRegistry()); + return codec.decode(expression, args); + } + + private static String wrapJsonIfNecessary(String json) { + + if (!StringUtils.hasText(json)) { + return json; + } + + String raw = json.trim(); + return (raw.startsWith("{") && raw.endsWith("}")) ? 
raw : "{%s}".formatted(raw); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java index 5b4120389d..12d8c966af 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/BulkOperationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,11 +17,12 @@ import java.util.List; +import org.jspecify.annotations.Nullable; import org.springframework.dao.DataAccessException; -import com.mongodb.BulkWriteError; -import com.mongodb.BulkWriteException; -import com.mongodb.BulkWriteResult; +import com.mongodb.MongoBulkWriteException; +import com.mongodb.bulk.BulkWriteError; +import com.mongodb.bulk.BulkWriteResult; /** * Is thrown when errors occur during bulk operations. @@ -38,12 +39,12 @@ public class BulkOperationException extends DataAccessException { private final BulkWriteResult result; /** - * Creates a new {@link BulkOperationException} with the given message and source {@link BulkWriteException}. + * Creates a new {@link BulkOperationException} with the given message and source {@link MongoBulkWriteException}. * - * @param message must not be {@literal null}. + * @param message can be {@literal null}. * @param source must not be {@literal null}. 
*/ - public BulkOperationException(String message, BulkWriteException source) { + public BulkOperationException(@Nullable String message, MongoBulkWriteException source) { super(message, source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java deleted file mode 100644 index 4d7d4d8752..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CannotGetMongoDbConnectionException.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright 2010-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb; - -import org.springframework.dao.DataAccessResourceFailureException; -import org.springframework.data.authentication.UserCredentials; -import org.springframework.lang.Nullable; - -/** - * Exception being thrown in case we cannot connect to a MongoDB instance. 
- * - * @author Oliver Gierke - * @author Mark Paluch - */ -public class CannotGetMongoDbConnectionException extends DataAccessResourceFailureException { - - private final UserCredentials credentials; - private final @Nullable String database; - - private static final long serialVersionUID = 1172099106475265589L; - - public CannotGetMongoDbConnectionException(String msg, Throwable cause) { - super(msg, cause); - this.database = null; - this.credentials = UserCredentials.NO_CREDENTIALS; - } - - public CannotGetMongoDbConnectionException(String msg) { - this(msg, null, UserCredentials.NO_CREDENTIALS); - } - - public CannotGetMongoDbConnectionException(String msg, @Nullable String database, UserCredentials credentials) { - super(msg); - this.database = database; - this.credentials = credentials; - } - - /** - * Returns the {@link UserCredentials} that were used when trying to connect to the MongoDB instance. - * - * @return - */ - public UserCredentials getCredentials() { - return this.credentials; - } - - /** - * Returns the name of the database trying to be accessed. - * - * @return - */ - @Nullable - public String getDatabase() { - return database; - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java index 89db56b525..c59eecb43a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ClientSessionException.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb; +import org.jspecify.annotations.Nullable; import org.springframework.dao.NonTransientDataAccessException; -import org.springframework.lang.Nullable; /** * {@link NonTransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java index 6c1992fc54..53515f9fcd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/CodecRegistryProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -62,7 +62,7 @@ default boolean hasCodecFor(Class type) { */ default Optional> getCodecFor(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); try { return Optional.of(getCodecRegistry().get(type)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java new file mode 100644 index 0000000000..87201ef9ee --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/DefaultMongoTransactionOptionsResolver.java @@ -0,0 +1,58 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.util.Map; +import java.util.Set; + +import org.jspecify.annotations.Nullable; + +/** + * Default implementation of {@link MongoTransactionOptions} using {@literal mongo:} as {@link #getLabelPrefix() label + * prefix} creating {@link SimpleMongoTransactionOptions} out of a given argument {@link Map}. 
Uses + * {@link SimpleMongoTransactionOptions#KNOWN_KEYS} to validate entries in arguments to resolve and errors on unknown + * entries. + * + * @author Christoph Strobl + * @since 4.3 + */ +enum DefaultMongoTransactionOptionsResolver implements MongoTransactionOptionsResolver { + + INSTANCE; + + private static final String PREFIX = "mongo:"; + + @Override + public MongoTransactionOptions convert(Map options) { + + validateKeys(options.keySet()); + return SimpleMongoTransactionOptions.of(options); + } + + @Override + public @Nullable String getLabelPrefix() { + return PREFIX; + } + + private static void validateKeys(Set keys) { + + if (!SimpleMongoTransactionOptions.KNOWN_KEYS.containsAll(keys)) { + + throw new IllegalArgumentException("Transaction labels contained invalid values. Has to be one of %s" + .formatted(SimpleMongoTransactionOptions.KNOWN_KEYS)); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java index 24d35908c5..f95a3c5310 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/InvalidMongoDbApiUsageException.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java index bbcd737854..3fc3f82fbf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/LazyLoadingException.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -25,8 +25,10 @@ public class LazyLoadingException extends UncategorizedDataAccessException { private static final long serialVersionUID = -7089224903873220037L; /** - * @param msg - * @param cause + * Constructor for LazyLoadingException. + * + * @param msg the detail message. + * @param cause the exception thrown by underlying data access API. 
*/ public LazyLoadingException(String msg, Throwable cause) { super(msg, cause); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java index 3fdff0ec6a..72b2794d05 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoCollectionUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,8 +20,8 @@ /** * Helper class featuring helper methods for working with MongoDb collections. - *

- *

+ *
+ *
* Mainly intended for internal use within the framework. * * @author Thomas Risberg diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java similarity index 60% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java index 15a9ae691b..1fcd5de516 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDbFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,7 +21,6 @@ import org.springframework.data.mongodb.core.MongoExceptionTranslator; import com.mongodb.ClientSessionOptions; -import com.mongodb.DB; import com.mongodb.client.ClientSession; import com.mongodb.client.MongoDatabase; @@ -31,25 +30,26 @@ * @author Mark Pollack * @author Thomas Darimont * @author Christoph Strobl + * @since 3.0 */ -public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvider { +public interface MongoDatabaseFactory extends CodecRegistryProvider, MongoSessionProvider { /** - * Creates a default {@link MongoDatabase} instance. + * Obtain a {@link MongoDatabase} from the underlying factory. * - * @return + * @return never {@literal null}. 
* @throws DataAccessException */ - MongoDatabase getDb() throws DataAccessException; + MongoDatabase getMongoDatabase() throws DataAccessException; /** - * Creates a {@link DB} instance to access the database with the given name. + * Obtain a {@link MongoDatabase} instance to access the database with the given name. * - * @param dbName must not be {@literal null} or empty. - * @return + * @param dbName must not be {@literal null}. + * @return never {@literal null}. * @throws DataAccessException */ - MongoDatabase getDb(String dbName) throws DataAccessException; + MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; /** * Exposes a shared {@link MongoExceptionTranslator}. @@ -58,16 +58,6 @@ public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvi */ PersistenceExceptionTranslator getExceptionTranslator(); - /** - * Get the legacy database entry point. Please consider {@link #getDb()} instead. - * - * @return - * @deprecated since 2.1, use {@link #getDb()}. This method will be removed with a future version as it works only - * with the legacy MongoDB driver. - */ - @Deprecated - DB getLegacyDb(); - /** * Get the underlying {@link CodecRegistry} used by the MongoDB Java driver. * @@ -75,7 +65,7 @@ public interface MongoDbFactory extends CodecRegistryProvider, MongoSessionProvi */ @Override default CodecRegistry getCodecRegistry() { - return getDb().getCodecRegistry(); + return getMongoDatabase().getCodecRegistry(); } /** @@ -88,24 +78,35 @@ default CodecRegistry getCodecRegistry() { ClientSession getSession(ClientSessionOptions options); /** - * Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances - * that are aware and bound to a new session with given {@link ClientSessionOptions options}. 
+ * Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase} + * instances that are aware and bound to a new session with given {@link ClientSessionOptions options}. * * @param options must not be {@literal null}. * @return never {@literal null}. * @since 2.1 */ - default MongoDbFactory withSession(ClientSessionOptions options) { + default MongoDatabaseFactory withSession(ClientSessionOptions options) { return withSession(getSession(options)); } /** - * Obtain a {@link ClientSession} bound instance of {@link MongoDbFactory} returning {@link MongoDatabase} instances - * that are aware and bound to the given session. + * Obtain a {@link ClientSession} bound instance of {@link MongoDatabaseFactory} returning {@link MongoDatabase} + * instances that are aware and bound to the given session. * * @param session must not be {@literal null}. * @return never {@literal null}. * @since 2.1 */ - MongoDbFactory withSession(ClientSession session); + MongoDatabaseFactory withSession(ClientSession session); + + /** + * Returns if the given {@link MongoDatabaseFactory} is bound to a {@link ClientSession} that has an + * {@link ClientSession#hasActiveTransaction() active transaction}. + * + * @return {@literal true} if there's an active transaction, {@literal false} otherwise. + * @since 2.1.3 + */ + default boolean isTransactionActive() { + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java index 713fc73dd3..042a5ba1d3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoDatabaseUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; import org.springframework.transaction.support.ResourceHolderSynchronization; import org.springframework.transaction.support.TransactionSynchronization; import org.springframework.transaction.support.TransactionSynchronizationManager; @@ -27,10 +27,9 @@ import com.mongodb.client.MongoDatabase; /** - * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDbFactory}. Used for obtaining + * Helper class for managing a {@link MongoDatabase} instances via {@link MongoDatabaseFactory}. Used for obtaining * {@link ClientSession session bound} resources, such as {@link MongoDatabase} and - * {@link com.mongodb.client.MongoCollection} suitable for transactional usage. - *

+ * {@link com.mongodb.client.MongoCollection} suitable for transactional usage.
* Note: Intended for internal usage only. * * @author Christoph Strobl @@ -41,85 +40,103 @@ public class MongoDatabaseUtils { /** - * Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory} using - * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ * Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory} using + {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * - * @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. */ - public static MongoDatabase getDatabase(MongoDbFactory factory) { + public static MongoDatabase getDatabase(MongoDatabaseFactory factory) { return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); } /** - * Obtain the default {@link MongoDatabase database} form the given {@link MongoDbFactory factory}. - *

+ * Obtain the default {@link MongoDatabase database} from the given {@link MongoDatabaseFactory factory}.
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * - * @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. */ - public static MongoDatabase getDatabase(MongoDbFactory factory, SessionSynchronization sessionSynchronization) { + public static MongoDatabase getDatabase(MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) { return doGetMongoDatabase(null, factory, sessionSynchronization); } /** - * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory} using - * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}. - *

+ * Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory} using + {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. * * @param dbName the name of the {@link MongoDatabase} to get. - * @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. */ - public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory) { + public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory) { return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); } /** - * Obtain the {@link MongoDatabase database} with given name form the given {@link MongoDbFactory factory}. - *

+ * Obtain the {@link MongoDatabase database} with given name from the given {@link MongoDatabaseFactory factory}. + *
* Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the current * {@link Thread} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. - * + * * @param dbName the name of the {@link MongoDatabase} to get. - * @param factory the {@link MongoDbFactory} to get the {@link MongoDatabase} from. + * @param factory the {@link MongoDatabaseFactory} to get the {@link MongoDatabase} from. * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. */ - public static MongoDatabase getDatabase(String dbName, MongoDbFactory factory, + public static MongoDatabase getDatabase(@Nullable String dbName, MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) { return doGetMongoDatabase(dbName, factory, sessionSynchronization); } - private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDbFactory factory, + private static MongoDatabase doGetMongoDatabase(@Nullable String dbName, MongoDatabaseFactory factory, SessionSynchronization sessionSynchronization) { - Assert.notNull(factory, "Factory must not be null!"); + Assert.notNull(factory, "Factory must not be null"); - if (!TransactionSynchronizationManager.isSynchronizationActive()) { - return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb(); + if (sessionSynchronization == SessionSynchronization.NEVER + || !TransactionSynchronizationManager.isSynchronizationActive()) { + return StringUtils.hasText(dbName) ? factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); } ClientSession session = doGetSession(factory, sessionSynchronization); - if(session == null) { - return StringUtils.hasText(dbName) ? factory.getDb(dbName) : factory.getDb(); + if (session == null) { + return StringUtils.hasText(dbName) ? 
factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); } - MongoDbFactory factoryToUse = factory.withSession(session); - return StringUtils.hasText(dbName) ? factoryToUse.getDb(dbName) : factoryToUse.getDb(); + MongoDatabaseFactory factoryToUse = factory.withSession(session); + return StringUtils.hasText(dbName) ? factoryToUse.getMongoDatabase(dbName) : factoryToUse.getMongoDatabase(); } - @Nullable - private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynchronization sessionSynchronization) { + /** + * Check if the {@link MongoDatabaseFactory} is actually bound to a {@link ClientSession} that has an active + * transaction, or if a {@link TransactionSynchronization} has been registered for the {@link MongoDatabaseFactory + * resource} and if the associated {@link ClientSession} has an {@link ClientSession#hasActiveTransaction() active + * transaction}. + * + * @param dbFactory the resource to check transactions for. Must not be {@literal null}. + * @return {@literal true} if the factory has an ongoing transaction. 
+ * @since 2.1.3 + */ + public static boolean isTransactionActive(MongoDatabaseFactory dbFactory) { + + if (dbFactory.isTransactionActive()) { + return true; + } + + MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory); + return resourceHolder != null && resourceHolder.hasActiveTransaction(); + } + + private static @Nullable ClientSession doGetSession(MongoDatabaseFactory dbFactory, + SessionSynchronization sessionSynchronization) { MongoResourceHolder resourceHolder = (MongoResourceHolder) TransactionSynchronizationManager.getResource(dbFactory); @@ -140,7 +157,7 @@ private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynch // init a non native MongoDB transaction by registering a MongoSessionSynchronization resourceHolder = new MongoResourceHolder(createClientSession(dbFactory), dbFactory); - resourceHolder.getSession().startTransaction(); + resourceHolder.getRequiredSession().startTransaction(); TransactionSynchronizationManager .registerSynchronization(new MongoSessionSynchronization(resourceHolder, dbFactory)); @@ -150,7 +167,7 @@ private static ClientSession doGetSession(MongoDbFactory dbFactory, SessionSynch return resourceHolder.getSession(); } - private static ClientSession createClientSession(MongoDbFactory dbFactory) { + private static ClientSession createClientSession(MongoDatabaseFactory dbFactory) { return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build()); } @@ -165,66 +182,41 @@ private static class MongoSessionSynchronization extends ResourceHolderSynchroni private final MongoResourceHolder resourceHolder; - MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDbFactory dbFactory) { + MongoSessionSynchronization(MongoResourceHolder resourceHolder, MongoDatabaseFactory dbFactory) { super(resourceHolder, dbFactory); this.resourceHolder = resourceHolder; } - /* - * (non-Javadoc) - * @see 
org.springframework.transaction.support.ResourceHolderSynchronization#shouldReleaseBeforeCompletion() - */ @Override protected boolean shouldReleaseBeforeCompletion() { return false; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceHolderSynchronization#processResourceAfterCommit(java.lang.Object) - */ @Override protected void processResourceAfterCommit(MongoResourceHolder resourceHolder) { - if (isTransactionActive(resourceHolder)) { - resourceHolder.getSession().commitTransaction(); + if (resourceHolder.hasActiveTransaction()) { + resourceHolder.getRequiredSession().commitTransaction(); } } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceHolderSynchronization#afterCompletion(int) - */ @Override public void afterCompletion(int status) { - if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) { - resourceHolder.getSession().abortTransaction(); + if (status == TransactionSynchronization.STATUS_ROLLED_BACK && this.resourceHolder.hasActiveTransaction()) { + resourceHolder.getRequiredSession().abortTransaction(); } super.afterCompletion(status); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceHolderSynchronization#releaseResource(java.lang.Object, java.lang.Object) - */ @Override protected void releaseResource(MongoResourceHolder resourceHolder, Object resourceKey) { if (resourceHolder.hasActiveSession()) { - resourceHolder.getSession().close(); - } - } - - private boolean isTransactionActive(MongoResourceHolder resourceHolder) { - - if (!resourceHolder.hasSession()) { - return false; + resourceHolder.getRequiredSession().close(); } - - return resourceHolder.getSession().hasActiveTransaction(); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java new file mode 100644 index 
0000000000..a087439d72 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoExpression.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +/** + * Wrapper object for MongoDB expressions like {@code $toUpper : $name} that manifest as {@link org.bson.Document} when + * passed on to the driver. + *
+ * A set of predefined {@link MongoExpression expressions}, including a + * {@link org.springframework.data.mongodb.core.aggregation.AggregationSpELExpression SpEL based variant} for method + * like expressions (eg. {@code toUpper(name)}) are available via the + * {@link org.springframework.data.mongodb.core.aggregation Aggregation API}. + * + * @author Christoph Strobl + * @since 3.2 + * @see org.springframework.data.mongodb.core.aggregation.ArithmeticOperators + * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators + * @see org.springframework.data.mongodb.core.aggregation.ComparisonOperators + * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators + * @see org.springframework.data.mongodb.core.aggregation.ConvertOperators + * @see org.springframework.data.mongodb.core.aggregation.DateOperators + * @see org.springframework.data.mongodb.core.aggregation.ObjectOperators + * @see org.springframework.data.mongodb.core.aggregation.SetOperators + * @see org.springframework.data.mongodb.core.aggregation.StringOperators + */ +@FunctionalInterface +public interface MongoExpression { + + /** + * Create a new {@link MongoExpression} from plain {@link String} (eg. {@code $toUpper : $name}).
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document} + * if necessary. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MongoExpression}. + */ + static MongoExpression create(String expression) { + return new BindableMongoExpression(expression, null); + } + + /** + * Create a new {@link MongoExpression} from plain {@link String} containing placeholders (eg. {@code $toUpper : ?0}) + * that will be resolved on first call of {@link #toDocument()}.
+ * The given expression will be wrapped with { ... } to match an actual MongoDB {@link org.bson.Document} + * if necessary. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MongoExpression}. + */ + static MongoExpression create(String expression, Object... args) { + return new BindableMongoExpression(expression, args); + } + + /** + * Obtain the native {@link org.bson.Document} representation. + * + * @return never {@literal null}. + */ + org.bson.Document toDocument(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java new file mode 100644 index 0000000000..39c4815d47 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoManagedTypes.java @@ -0,0 +1,81 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.util.Arrays; +import java.util.function.Consumer; + +import org.springframework.data.domain.ManagedTypes; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +public final class MongoManagedTypes implements ManagedTypes { + + private final ManagedTypes delegate; + + private MongoManagedTypes(ManagedTypes types) { + this.delegate = types; + } + + /** + * Wraps an existing {@link ManagedTypes} object with {@link MongoManagedTypes}. + * + * @param managedTypes + * @return + */ + public static MongoManagedTypes from(ManagedTypes managedTypes) { + return new MongoManagedTypes(managedTypes); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given array of {@link Class types}. + * + * @param types array of {@link Class types} used to initialize the {@link ManagedTypes}; must not be {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized from {@link Class types}. + */ + public static MongoManagedTypes from(Class... types) { + return fromIterable(Arrays.asList(types)); + } + + /** + * Factory method used to construct {@link MongoManagedTypes} from the given, required {@link Iterable} of + * {@link Class types}. + * + * @param types {@link Iterable} of {@link Class types} used to initialize the {@link ManagedTypes}; must not be + * {@literal null}. + * @return new instance of {@link MongoManagedTypes} initialized the given, required {@link Iterable} of {@link Class + * types}. + */ + public static MongoManagedTypes fromIterable(Iterable> types) { + return from(ManagedTypes.fromIterable(types)); + } + + /** + * Factory method to return an empty {@link MongoManagedTypes} object. + * + * @return an empty {@link MongoManagedTypes} object. 
+ */ + public static MongoManagedTypes empty() { + return from(ManagedTypes.empty()); + } + + @Override + public void forEach(Consumer> action) { + delegate.forEach(action); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java index f77d0ea6c2..81c25d0998 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoResourceHolder.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.support.ResourceHolderSupport; @@ -23,11 +23,11 @@ /** * MongoDB specific {@link ResourceHolderSupport resource holder}, wrapping a {@link ClientSession}. - * {@link MongoTransactionManager} binds instances of this class to the thread. - *

+ * {@link MongoTransactionManager} binds instances of this class to the thread.
* Note: Intended for internal usage only. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.1 * @see MongoTransactionManager * @see org.springframework.data.mongodb.core.MongoTemplate @@ -35,15 +35,15 @@ class MongoResourceHolder extends ResourceHolderSupport { private @Nullable ClientSession session; - private MongoDbFactory dbFactory; + private MongoDatabaseFactory dbFactory; /** * Create a new {@link MongoResourceHolder} for a given {@link ClientSession session}. * * @param session the associated {@link ClientSession}. Can be {@literal null}. - * @param dbFactory the associated {@link MongoDbFactory}. must not be {@literal null}. + * @param dbFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}. */ - MongoResourceHolder(@Nullable ClientSession session, MongoDbFactory dbFactory) { + MongoResourceHolder(@Nullable ClientSession session, MongoDatabaseFactory dbFactory) { this.session = session; this.dbFactory = dbFactory; @@ -58,9 +58,25 @@ ClientSession getSession() { } /** - * @return the associated {@link MongoDbFactory}. + * @return the required associated {@link ClientSession}. + * @throws IllegalStateException if no {@link ClientSession} is associated with this {@link MongoResourceHolder}. + * @since 2.1.3 */ - public MongoDbFactory getDbFactory() { + ClientSession getRequiredSession() { + + ClientSession session = getSession(); + + if (session == null) { + throw new IllegalStateException("No session available"); + } + + return session; + } + + /** + * @return the associated {@link MongoDatabaseFactory}. + */ + public MongoDatabaseFactory getDbFactory() { return dbFactory; } @@ -101,7 +117,21 @@ boolean hasActiveSession() { return false; } - return hasServerSession() && !getSession().getServerSession().isClosed(); + return hasServerSession() && !getRequiredSession().getServerSession().isClosed(); + } + + /** + * @return {@literal true} if the session has an active transaction. 
+ * @since 2.1.3 + * @see #hasActiveSession() + */ + boolean hasActiveTransaction() { + + if (!hasActiveSession()) { + return false; + } + + return getRequiredSession().hasActiveTransaction(); } /** @@ -111,7 +141,7 @@ boolean hasActiveSession() { boolean hasServerSession() { try { - return getSession().getServerSession() != null; + return getRequiredSession().getServerSession() != null; } catch (IllegalStateException serverSessionClosed) { // ignore } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java index 45ca6c2abe..645b3508db 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoSessionProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java new file mode 100644 index 0000000000..3d7bec6780 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionException.java @@ -0,0 +1,47 @@ +/* + * Copyright 2018-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.jspecify.annotations.Nullable; + +/** + * A specific {@link ClientSessionException} related to issues with a transaction such as aborted or non existing + * transactions. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class MongoTransactionException extends ClientSessionException { + + /** + * Constructor for {@link MongoTransactionException}. + * + * @param msg the detail message. Must not be {@literal null}. + */ + public MongoTransactionException(String msg) { + super(msg); + } + + /** + * Constructor for {@link ClientSessionException}. + * + * @param msg the detail message. Can be {@literal null}. + * @param cause the root cause. Can be {@literal null}. + */ + public MongoTransactionException(@Nullable String msg, @Nullable Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java index 5ec4e10e7d..1f97bb69e9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionManager.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. 
+ * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb; +import org.jspecify.annotations.Nullable; import org.springframework.beans.factory.InitializingBean; -import org.springframework.lang.Nullable; import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.TransactionException; import org.springframework.transaction.TransactionSystemException; @@ -36,74 +36,91 @@ /** * A {@link org.springframework.transaction.PlatformTransactionManager} implementation that manages - * {@link ClientSession} based transactions for a single {@link MongoDbFactory}. - *

- * Binds a {@link ClientSession} from the specified {@link MongoDbFactory} to the thread. - *

+ * {@link ClientSession} based transactions for a single {@link MongoDatabaseFactory}.
+ * Binds a {@link ClientSession} from the specified {@link MongoDatabaseFactory} to the thread.
* {@link TransactionDefinition#isReadOnly() Readonly} transactions operate on a {@link ClientSession} and enable causal * consistency, and also {@link ClientSession#startTransaction() start}, {@link ClientSession#commitTransaction() - * commit} or {@link ClientSession#abortTransaction() abort} a transaction. - *

+ * commit} or {@link ClientSession#abortTransaction() abort} a transaction.
* Application code is required to retrieve the {@link com.mongodb.client.MongoDatabase} via - * {@link MongoDatabaseUtils#getDatabase(MongoDbFactory)} instead of a standard {@link MongoDbFactory#getDb()} call. - * Spring classes such as {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly. - * + * {@link MongoDatabaseUtils#getDatabase(MongoDatabaseFactory)} instead of a standard + * {@link MongoDatabaseFactory#getMongoDatabase()} call. Spring classes such as + * {@link org.springframework.data.mongodb.core.MongoTemplate} use this strategy implicitly.
+ * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. One may override + * {@link #doCommit(MongoTransactionObject)} to implement the + * Retry Commit Operation + * behavior as outlined in the MongoDB reference manual. + * * @author Christoph Strobl * @author Mark Paluch * @currentRead Shadow's Edge - Brent Weeks * @since 2.1 * @see MongoDB Transaction Documentation - * @see MongoDatabaseUtils#getDatabase(MongoDbFactory, SessionSynchronization) + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) */ public class MongoTransactionManager extends AbstractPlatformTransactionManager implements ResourceTransactionManager, InitializingBean { - private @Nullable MongoDbFactory dbFactory; - private @Nullable TransactionOptions options; + private @Nullable MongoDatabaseFactory databaseFactory; + private MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; /** - * Create a new {@link MongoTransactionManager} for bean-style usage. - *

- * Note:The {@link MongoDbFactory db factory} has to be {@link #setDbFactory(MongoDbFactory) set} - * before using the instance. Use this constructor to prepare a {@link MongoTransactionManager} via a - * {@link org.springframework.beans.factory.BeanFactory}. - *

+ * Create a new {@link MongoTransactionManager} for bean-style usage.
+ * Note: The {@link MongoDatabaseFactory db factory} has to be + * {@link #setDatabaseFactory(MongoDatabaseFactory) set} before using the instance. Use this constructor to prepare a + * {@link MongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}.
* Optionally it is possible to set default {@link TransactionOptions transaction options} defining * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. - * - * @see #setDbFactory(MongoDbFactory) + * + * @see #setDatabaseFactory(MongoDatabaseFactory) * @see #setTransactionSynchronization(int) */ - public MongoTransactionManager() {} + public MongoTransactionManager() { + + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + this.options = MongoTransactionOptions.NONE; + } /** - * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory}. + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory}. * - * @param dbFactory must not be {@literal null}. + * @param databaseFactory must not be {@literal null}. */ - public MongoTransactionManager(MongoDbFactory dbFactory) { - this(dbFactory, null); + public MongoTransactionManager(MongoDatabaseFactory databaseFactory) { + this(databaseFactory, null); } /** - * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDbFactory} applying the - * given {@link TransactionOptions options}, if present, when starting a new transaction. + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} + * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. * - * @param dbFactory must not be {@literal null}. + * @param databaseFactory must not be {@literal null}. * @param options can be {@literal null}. 
*/ - public MongoTransactionManager(MongoDbFactory dbFactory, @Nullable TransactionOptions options) { + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } + + /** + * Create a new {@link MongoTransactionManager} obtaining sessions from the given {@link MongoDatabaseFactory} + * applying the given {@link TransactionOptions options}, if present, when starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. + * @since 4.3 + */ + public MongoTransactionManager(MongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, MongoTransactionOptions defaultTransactionOptions) { - Assert.notNull(dbFactory, "DbFactory must not be null!"); + Assert.notNull(databaseFactory, "MongoDatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); - this.dbFactory = dbFactory; - this.options = options; + this.databaseFactory = databaseFactory; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions; } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doGetTransaction() - */ @Override protected Object doGetTransaction() throws TransactionException { @@ -112,19 +129,11 @@ protected Object doGetTransaction() throws TransactionException { return new MongoTransactionObject(resourceHolder); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#isExistingTransaction(java.lang.Object) - */ @Override protected boolean isExistingTransaction(Object transaction) throws TransactionException { return 
extractMongoTransaction(transaction).hasResourceHolder(); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doBegin(java.lang.Object, org.springframework.transaction.TransactionDefinition) - */ @Override protected void doBegin(Object transaction, TransactionDefinition definition) throws TransactionException { @@ -140,7 +149,9 @@ protected void doBegin(Object transaction, TransactionDefinition definition) thr } try { - mongoTransactionObject.startTransaction(options); + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition) + .mergeWith(options); + mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); } catch (MongoException ex) { throw new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.", debugString(mongoTransactionObject.getSession())), ex); @@ -154,10 +165,6 @@ protected void doBegin(Object transaction, TransactionDefinition definition) thr TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), resourceHolder); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doSuspend(java.lang.Object) - */ @Override protected Object doSuspend(Object transaction) throws TransactionException { @@ -167,21 +174,13 @@ protected Object doSuspend(Object transaction) throws TransactionException { return TransactionSynchronizationManager.unbindResource(getRequiredDbFactory()); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doResume(java.lang.Object, java.lang.Object) - */ @Override protected void doResume(@Nullable Object transaction, Object suspendedResources) { TransactionSynchronizationManager.bindResource(getRequiredDbFactory(), suspendedResources); } - /* - * (non-Javadoc) - * 
org.springframework.transaction.support.AbstractPlatformTransactionManager#doCommit(org.springframework.transaction.support.DefaultTransactionStatus) - */ @Override - protected void doCommit(DefaultTransactionStatus status) throws TransactionException { + protected final void doCommit(DefaultTransactionStatus status) throws TransactionException { MongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); @@ -191,18 +190,46 @@ protected void doCommit(DefaultTransactionStatus status) throws TransactionExcep } try { - mongoTransactionObject.commitTransaction(); - } catch (MongoException ex) { + doCommit(mongoTransactionObject); + } catch (Exception ex) { throw new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.", debugString(mongoTransactionObject.getSession())), ex); } } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doRollback(org.springframework.transaction.support.DefaultTransactionStatus) + /** + * Customization hook to perform an actual commit of the given transaction.
+ * If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding + * {@literal error labels}.
+ * By default those labels are ignored, nevertheless one might check for + {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit error labels} and retry the + commit.
+ * + *

+	 * 
+	 * int retries = 3;
+	 * do {
+	 *     try {
+	 *         transactionObject.commitTransaction();
+	 *         break;
+	 *     } catch (MongoException ex) {
+	 *         if (!ex.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL)) {
+	 *             throw ex;
+	 *         }
+	 *     }
+	 *     Thread.sleep(500);
+	 * } while (--retries > 0);
+	 * 
+	 * 
+ * + * @param transactionObject never {@literal null}. + * @throws Exception in case of transaction errors. */ + protected void doCommit(MongoTransactionObject transactionObject) throws Exception { + transactionObject.commitTransaction(); + } + @Override protected void doRollback(DefaultTransactionStatus status) throws TransactionException { @@ -222,10 +249,6 @@ protected void doRollback(DefaultTransactionStatus status) throws TransactionExc } } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doSetRollbackOnly(org.springframework.transaction.support.DefaultTransactionStatus) - */ @Override protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException { @@ -233,10 +256,6 @@ protected void doSetRollbackOnly(DefaultTransactionStatus status) throws Transac transactionObject.getRequiredResourceHolder().setRollbackOnly(); } - /* - * (non-Javadoc) - * org.springframework.transaction.support.AbstractPlatformTransactionManager#doCleanupAfterCompletion(java.lang.Object) - */ @Override protected void doCleanupAfterCompletion(Object transaction) { @@ -259,14 +278,14 @@ protected void doCleanupAfterCompletion(Object transaction) { } /** - * Set the {@link MongoDbFactory} that this instance should manage transactions for. + * Set the {@link MongoDatabaseFactory} that this instance should manage transactions for. * - * @param dbFactory must not be {@literal null}. + * @param databaseFactory must not be {@literal null}. */ - public void setDbFactory(MongoDbFactory dbFactory) { + public void setDatabaseFactory(MongoDatabaseFactory databaseFactory) { - Assert.notNull(dbFactory, "DbFactory must not be null!"); - this.dbFactory = dbFactory; + Assert.notNull(databaseFactory, "DbFactory must not be null"); + this.databaseFactory = databaseFactory; } /** @@ -275,32 +294,23 @@ public void setDbFactory(MongoDbFactory dbFactory) { * @param options can be {@literal null}. 
*/ public void setOptions(@Nullable TransactionOptions options) { - this.options = options; + this.options = MongoTransactionOptions.of(options); } /** - * Get the {@link MongoDbFactory} that this instance manages transactions for. + * Get the {@link MongoDatabaseFactory} that this instance manages transactions for. * * @return can be {@literal null}. */ - @Nullable - public MongoDbFactory getDbFactory() { - return dbFactory; + public @Nullable MongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.ResourceTransactionManager#getResourceFactory() - */ @Override - public MongoDbFactory getResourceFactory() { + public MongoDatabaseFactory getResourceFactory() { return getRequiredDbFactory(); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.InitializingBean#afterPropertiesSet() - */ @Override public void afterPropertiesSet() { getRequiredDbFactory(); @@ -308,7 +318,7 @@ public void afterPropertiesSet() { private MongoResourceHolder newResourceHolder(TransactionDefinition definition, ClientSessionOptions options) { - MongoDbFactory dbFactory = getResourceFactory(); + MongoDatabaseFactory dbFactory = getResourceFactory(); MongoResourceHolder resourceHolder = new MongoResourceHolder(dbFactory.getSession(options), dbFactory); resourceHolder.setTimeoutIfNotDefaulted(determineTimeout(definition)); @@ -317,14 +327,14 @@ private MongoResourceHolder newResourceHolder(TransactionDefinition definition, } /** - * @throws IllegalStateException if {@link #dbFactory} is {@literal null}. + * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}. */ - private MongoDbFactory getRequiredDbFactory() { + private MongoDatabaseFactory getRequiredDbFactory() { - Assert.state(dbFactory != null, - "MongoTransactionManager operates upon a MongoDbFactory. Did you forget to provide one? 
It's required."); + Assert.state(databaseFactory != null, + "MongoTransactionManager operates upon a MongoDbFactory; Did you forget to provide one; It's required"); - return dbFactory; + return databaseFactory; } private static MongoTransactionObject extractMongoTransaction(Object transaction) { @@ -360,7 +370,7 @@ private static String debugString(@Nullable ClientSession session) { debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); - debugString += String.format("closed = %d, ", session.getServerSession().isClosed()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); debugString += String.format("clusterTime = %s", session.getClusterTime()); } else { debugString += "id = n/a"; @@ -386,7 +396,7 @@ private static String debugString(@Nullable ClientSession session) { * @since 2.1 * @see MongoResourceHolder */ - static class MongoTransactionObject implements SmartTransactionObject { + protected static class MongoTransactionObject implements SmartTransactionObject { private @Nullable MongoResourceHolder resourceHolder; @@ -406,7 +416,7 @@ void setResourceHolder(@Nullable MongoResourceHolder resourceHolder) { /** * @return {@literal true} if a {@link MongoResourceHolder} is set. */ - boolean hasResourceHolder() { + final boolean hasResourceHolder() { return resourceHolder != null; } @@ -428,14 +438,14 @@ void startTransaction(@Nullable TransactionOptions options) { /** * Commit the transaction. */ - void commitTransaction() { + public void commitTransaction() { getRequiredSession().commitTransaction(); } /** * Rollback (abort) the transaction. 
*/ - void abortTransaction() { + public void abortTransaction() { getRequiredSession().abortTransaction(); } @@ -450,37 +460,28 @@ void closeSession() { } } - @Nullable - ClientSession getSession() { + public @Nullable ClientSession getSession() { return resourceHolder != null ? resourceHolder.getSession() : null; } private MongoResourceHolder getRequiredResourceHolder() { - Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present. o_O"); + Assert.state(resourceHolder != null, "MongoResourceHolder is required but not present; o_O"); return resourceHolder; } private ClientSession getRequiredSession() { ClientSession session = getSession(); - Assert.state(session != null, "A Session is required but it turned out to be null."); + Assert.state(session != null, "A Session is required but it turned out to be null"); return session; } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.SmartTransactionObject#isRollbackOnly() - */ @Override public boolean isRollbackOnly() { return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); } - /* - * (non-Javadoc) - * @see org.springframework.transaction.support.SmartTransactionObject#flush() - */ @Override public void flush() { TransactionSynchronizationUtils.triggerFlush(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java new file mode 100644 index 0000000000..04bcd36e35 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptions.java @@ -0,0 +1,193 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; +import org.springframework.data.mongodb.core.WriteConcernAware; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.TransactionOptions; +import com.mongodb.WriteConcern; +import org.springframework.lang.Contract; + +/** + * Options to be applied within a specific transaction scope. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptions + extends TransactionMetadata, ReadConcernAware, ReadPreferenceAware, WriteConcernAware { + + /** + * Value Object representing empty options enforcing client defaults. Returns {@literal null} for all getter methods. + */ + MongoTransactionOptions NONE = new MongoTransactionOptions() { + + @Override + public @Nullable Duration getMaxCommitTime() { + return null; + } + + @Override + public @Nullable ReadConcern getReadConcern() { + return null; + } + + @Override + public @Nullable ReadPreference getReadPreference() { + return null; + } + + @Override + public @Nullable WriteConcern getWriteConcern() { + return null; + } + }; + + /** + * Merge current options with given ones. Will return first non {@literal null} value from getters whereas the + * {@literal this} has precedence over the given fallbackOptions. 
+ * + * @param fallbackOptions can be {@literal null}. + * @return new instance of {@link MongoTransactionOptions} or this if {@literal fallbackOptions} is {@literal null} or + * {@link #NONE}. + */ + @Contract("null -> this") + default MongoTransactionOptions mergeWith(@Nullable MongoTransactionOptions fallbackOptions) { + + if (fallbackOptions == null || MongoTransactionOptions.NONE.equals(fallbackOptions)) { + return this; + } + + return new MongoTransactionOptions() { + + @Override + public @Nullable Duration getMaxCommitTime() { + return MongoTransactionOptions.this.hasMaxCommitTime() ? MongoTransactionOptions.this.getMaxCommitTime() + : fallbackOptions.getMaxCommitTime(); + } + + @Override + public @Nullable ReadConcern getReadConcern() { + return MongoTransactionOptions.this.hasReadConcern() ? MongoTransactionOptions.this.getReadConcern() + : fallbackOptions.getReadConcern(); + } + + @Override + public @Nullable ReadPreference getReadPreference() { + return MongoTransactionOptions.this.hasReadPreference() ? MongoTransactionOptions.this.getReadPreference() + : fallbackOptions.getReadPreference(); + } + + @Override + public @Nullable WriteConcern getWriteConcern() { + return MongoTransactionOptions.this.hasWriteConcern() ? MongoTransactionOptions.this.getWriteConcern() + : fallbackOptions.getWriteConcern(); + } + }; + } + + /** + * Apply the current options using the given mapping {@link Function} and return its result. + * + * @param mappingFunction + * @return result of the mapping function. + */ + default T map(Function mappingFunction) { + return mappingFunction.apply(this); + } + + /** + * @return MongoDB driver native {@link TransactionOptions}. 
+ * @see MongoTransactionOptions#map(Function) + */ + @SuppressWarnings("NullAway") + default @Nullable TransactionOptions toDriverOptions() { + + return map(it -> { + + if (MongoTransactionOptions.NONE.equals(it)) { + return null; + } + + TransactionOptions.Builder builder = TransactionOptions.builder(); + if (it.hasMaxCommitTime()) { + builder.maxCommitTime(it.getMaxCommitTime().toMillis(), TimeUnit.MILLISECONDS); + } + if (it.hasReadConcern()) { + builder.readConcern(it.getReadConcern()); + } + if (it.hasReadPreference()) { + builder.readPreference(it.getReadPreference()); + } + if (it.hasWriteConcern()) { + builder.writeConcern(it.getWriteConcern()); + } + return builder.build(); + }); + } + + /** + * Factory method to wrap given MongoDB driver native {@link TransactionOptions} into {@link MongoTransactionOptions}. + * + * @param options can be {@literal null}. + * @return {@link MongoTransactionOptions#NONE} if given object is {@literal null}. + */ + static MongoTransactionOptions of(@Nullable TransactionOptions options) { + + if (options == null) { + return NONE; + } + + return new MongoTransactionOptions() { + + @Override + public @Nullable Duration getMaxCommitTime() { + + Long millis = options.getMaxCommitTime(TimeUnit.MILLISECONDS); + return millis != null ? 
Duration.ofMillis(millis) : null; + } + + @Override + public @Nullable ReadConcern getReadConcern() { + return options.getReadConcern(); + } + + @Override + public @Nullable ReadPreference getReadPreference() { + return options.getReadPreference(); + } + + @Override + public @Nullable WriteConcern getWriteConcern() { + return options.getWriteConcern(); + } + + @Override + public @Nullable TransactionOptions toDriverOptions() { + return options; + } + }; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java new file mode 100644 index 0000000000..c4bdbcca53 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/MongoTransactionOptionsResolver.java @@ -0,0 +1,114 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb; + +import java.util.Map; +import java.util.stream.Collectors; + +import org.jspecify.annotations.Nullable; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.interceptor.TransactionAttribute; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +/** + * A {@link TransactionOptionResolver} reading MongoDB specific {@link MongoTransactionOptions transaction options} from + * a {@link TransactionDefinition}. Implementations of {@link MongoTransactionOptions} may choose a specific + * {@link #getLabelPrefix() prefix} for {@link TransactionAttribute#getLabels() transaction attribute labels} to avoid + * evaluating non-store specific ones. + *

+ * {@link TransactionAttribute#getLabels()} evaluated by default should follow the property style using {@code =} to + * separate key and value pairs. + *

+ * By default {@link #resolve(TransactionDefinition)} will filter labels by the {@link #getLabelPrefix() prefix} and + * strip the prefix from the label before handing the pruned {@link Map} to the {@link #convert(Map)} function. + *

+ * A transaction definition with labels targeting MongoDB may look like the following: + *

+ * + * @Transactional(label = { "mongo:readConcern=majority" }) + * + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface MongoTransactionOptionsResolver extends TransactionOptionResolver { + + /** + * Obtain the default {@link MongoTransactionOptionsResolver} implementation using a {@literal mongo:} + * {@link #getLabelPrefix() prefix}. + * + * @return instance of default {@link MongoTransactionOptionsResolver} implementation. + */ + static MongoTransactionOptionsResolver defaultResolver() { + return DefaultMongoTransactionOptionsResolver.INSTANCE; + } + + /** + * Get the prefix used to filter applicable {@link TransactionAttribute#getLabels() labels}. + * + * @return {@literal null} if no label defined. + */ + @Nullable + String getLabelPrefix(); + + /** + * Resolve {@link MongoTransactionOptions} from a given {@link TransactionDefinition} by evaluating + * {@link TransactionAttribute#getLabels()} labels if possible. + *

+ * Splits applicable labels property style using {@literal =} as delimiter and removes a potential + {@link #getLabelPrefix() prefix} before calling {@link #convert(Map)} with filtered label values. + * + * @param definition + * @return {@link MongoTransactionOptions#NONE} in case the given {@link TransactionDefinition} is not a + {@link TransactionAttribute} or if no matching {@link TransactionAttribute#getLabels() labels} could be found. + * @throws IllegalArgumentException for options that do not map to valid transaction options or malformed labels. + */ + @Override + default MongoTransactionOptions resolve(TransactionDefinition definition) { + + if (!(definition instanceof TransactionAttribute attribute)) { + return MongoTransactionOptions.NONE; + } + + if (attribute.getLabels().isEmpty()) { + return MongoTransactionOptions.NONE; + } + + Map attributeMap = attribute.getLabels().stream() + .filter(it -> !StringUtils.hasText(getLabelPrefix()) || it.startsWith(getLabelPrefix())) + .map(it -> StringUtils.hasText(getLabelPrefix()) ? it.substring(getLabelPrefix().length()) : it).map(it -> { + + String[] kvPair = StringUtils.split(it, "="); + Assert.isTrue(kvPair != null && kvPair.length == 2, + () -> "No value present for transaction option %s".formatted(kvPair != null ? kvPair[0] : it)); + return kvPair; + }) + + .collect(Collectors.toMap(it -> it[0].trim(), it -> it[1].trim())); + + return attributeMap.isEmpty() ? MongoTransactionOptions.NONE : convert(attributeMap); + } + + /** + * Convert the given {@link Map} into an instance of {@link MongoTransactionOptions}. + * + * @param options never {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException for invalid options. 
+ */ + MongoTransactionOptions convert(Map options); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java index 69d606c842..f2a6714a95 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -13,7 +13,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package org.springframework.data.mongodb; import reactor.core.publisher.Mono; @@ -32,6 +31,7 @@ * * @author Mark Paluch * @author Christoph Strobl + * @author Mathieu Ouellet * @since 2.0 */ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider { @@ -39,19 +39,19 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider { /** * Creates a default {@link MongoDatabase} instance. * - * @return + * @return never {@literal null}. * @throws DataAccessException */ - MongoDatabase getMongoDatabase() throws DataAccessException; + Mono getMongoDatabase() throws DataAccessException; /** - * Creates a {@link MongoDatabase} instance to access the database with the given name. + * Obtain a {@link MongoDatabase} instance to access the database with the given name. 
* * @param dbName must not be {@literal null} or empty. - * @return + * @return never {@literal null}. * @throws DataAccessException */ - MongoDatabase getMongoDatabase(String dbName) throws DataAccessException; + Mono getMongoDatabase(String dbName) throws DataAccessException; /** * Exposes a shared {@link MongoExceptionTranslator}. @@ -65,10 +65,7 @@ public interface ReactiveMongoDatabaseFactory extends CodecRegistryProvider { * * @return never {@literal null}. */ - @Override - default CodecRegistry getCodecRegistry() { - return getMongoDatabase().getCodecRegistry(); - } + CodecRegistry getCodecRegistry(); /** * Obtain a {@link Mono} emitting a {@link ClientSession} for given {@link ClientSessionOptions options}. @@ -88,4 +85,16 @@ default CodecRegistry getCodecRegistry() { * @since 2.1 */ ReactiveMongoDatabaseFactory withSession(ClientSession session); + + /** + * Returns if the given {@link ReactiveMongoDatabaseFactory} is bound to a + * {@link com.mongodb.reactivestreams.client.ClientSession} that has an + * {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}. + * + * @return {@literal true} if there's an active transaction, {@literal false} otherwise. + * @since 2.2 + */ + default boolean isTransactionActive() { + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java new file mode 100644 index 0000000000..3d1c2ee89c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoDatabaseUtils.java @@ -0,0 +1,264 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; +import reactor.util.context.Context; + +import org.jspecify.annotations.Nullable; +import org.springframework.transaction.NoTransactionException; +import org.springframework.transaction.reactive.ReactiveResourceSynchronization; +import org.springframework.transaction.reactive.TransactionSynchronization; +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.support.ResourceHolderSynchronization; +import org.springframework.util.Assert; +import org.springframework.util.StringUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; + +/** + * Helper class for managing reactive {@link MongoDatabase} instances via {@link ReactiveMongoDatabaseFactory}. Used for + * obtaining {@link ClientSession session bound} resources, such as {@link MongoDatabase} and {@link MongoCollection} + * suitable for transactional usage.
+ * Note: Intended for internal usage only. + * + * @author Mark Paluch + * @author Christoph Strobl + * @author Mathieu Ouellet + * @since 2.2 + */ +public class ReactiveMongoDatabaseUtils { + + /** + * Check if the {@link ReactiveMongoDatabaseFactory} is actually bound to a + * {@link com.mongodb.reactivestreams.client.ClientSession} that has an active transaction, or if a + * {@link org.springframework.transaction.reactive.TransactionSynchronization} has been registered for the + * {@link ReactiveMongoDatabaseFactory resource} and if the associated + * {@link com.mongodb.reactivestreams.client.ClientSession} has an + * {@link com.mongodb.reactivestreams.client.ClientSession#hasActiveTransaction() active transaction}. + * + * @param databaseFactory the resource to check transactions for. Must not be {@literal null}. + * @return a {@link Mono} emitting {@literal true} if the factory has an ongoing transaction. + */ + public static Mono isTransactionActive(ReactiveMongoDatabaseFactory databaseFactory) { + + if (databaseFactory.isTransactionActive()) { + return Mono.just(true); + } + + return TransactionSynchronizationManager.forCurrentTransaction() // + .map(it -> { + + ReactiveMongoResourceHolder holder = (ReactiveMongoResourceHolder) it.getResource(databaseFactory); + return holder != null && holder.hasActiveTransaction(); + }) // + .onErrorResume(NoTransactionException.class, e -> Mono.just(false)); + } + + /** + * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory} using + * {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(ReactiveMongoDatabaseFactory factory) { + return doGetMongoDatabase(null, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the default {@link MongoDatabase database} form the given {@link ReactiveMongoDatabaseFactory factory}. + *
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(null, factory, sessionSynchronization); + } + + /** + * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory + * factory} using {@link SessionSynchronization#ON_ACTUAL_TRANSACTION native session synchronization}.
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. If {@literal null} the default database of the + * {@link ReactiveMongoDatabaseFactory}. + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory) { + return doGetMongoDatabase(dbName, factory, SessionSynchronization.ON_ACTUAL_TRANSACTION); + } + + /** + * Obtain the {@link MongoDatabase database} with given name form the given {@link ReactiveMongoDatabaseFactory + * factory}.
+ * Registers a {@link MongoSessionSynchronization MongoDB specific transaction synchronization} within the subscriber + * {@link Context} if {@link TransactionSynchronizationManager#isSynchronizationActive() synchronization is active}. + * + * @param dbName the name of the {@link MongoDatabase} to get. If {@literal null} the default database of the * + * {@link ReactiveMongoDatabaseFactory}. + * @param factory the {@link ReactiveMongoDatabaseFactory} to get the {@link MongoDatabase} from. + * @param sessionSynchronization the synchronization to use. Must not be {@literal null}. + * @return the {@link MongoDatabase} that is potentially associated with a transactional {@link ClientSession}. + */ + public static Mono getDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + return doGetMongoDatabase(dbName, factory, sessionSynchronization); + } + + private static Mono doGetMongoDatabase(@Nullable String dbName, ReactiveMongoDatabaseFactory factory, + SessionSynchronization sessionSynchronization) { + + Assert.notNull(factory, "DatabaseFactory must not be null"); + + if (sessionSynchronization == SessionSynchronization.NEVER) { + return getMongoDatabaseOrDefault(dbName, factory); + } + + return TransactionSynchronizationManager.forCurrentTransaction() + .filter(TransactionSynchronizationManager::isSynchronizationActive) // + .flatMap(synchronizationManager -> { + + return doGetSession(synchronizationManager, factory, sessionSynchronization) // + .flatMap(it -> getMongoDatabaseOrDefault(dbName, factory.withSession(it))); + }) // + .onErrorResume(NoTransactionException.class, e -> getMongoDatabaseOrDefault(dbName, factory)) + .switchIfEmpty(getMongoDatabaseOrDefault(dbName, factory)); + } + + private static Mono getMongoDatabaseOrDefault(@Nullable String dbName, + ReactiveMongoDatabaseFactory factory) { + return StringUtils.hasText(dbName) ? 
factory.getMongoDatabase(dbName) : factory.getMongoDatabase(); + } + + private static Mono doGetSession(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoDatabaseFactory dbFactory, SessionSynchronization sessionSynchronization) { + + final ReactiveMongoResourceHolder registeredHolder = (ReactiveMongoResourceHolder) synchronizationManager + .getResource(dbFactory); + + // check for native MongoDB transaction + if (registeredHolder != null + && (registeredHolder.hasSession() || registeredHolder.isSynchronizedWithTransaction())) { + + return registeredHolder.hasSession() ? Mono.just(registeredHolder.getSession()) + : createClientSession(dbFactory).map(registeredHolder::setSessionIfAbsent); + } + + if (SessionSynchronization.ON_ACTUAL_TRANSACTION.equals(sessionSynchronization)) { + return Mono.empty(); + } + + // init a non native MongoDB transaction by registering a MongoSessionSynchronization + return createClientSession(dbFactory).map(session -> { + + ReactiveMongoResourceHolder newHolder = new ReactiveMongoResourceHolder(session, dbFactory); + newHolder.getRequiredSession().startTransaction(); + + synchronizationManager + .registerSynchronization(new MongoSessionSynchronization(synchronizationManager, newHolder, dbFactory)); + newHolder.setSynchronizedWithTransaction(true); + synchronizationManager.bindResource(dbFactory, newHolder); + + return newHolder.getSession(); + }); + } + + private static Mono createClientSession(ReactiveMongoDatabaseFactory dbFactory) { + return dbFactory.getSession(ClientSessionOptions.builder().causallyConsistent(true).build()); + } + + /** + * MongoDB specific {@link ResourceHolderSynchronization} for resource cleanup at the end of a transaction when + * participating in a non-native MongoDB transaction, such as a R2CBC transaction. 
+ * + * @author Mark Paluch + * @since 2.2 + */ + private static class MongoSessionSynchronization + extends ReactiveResourceSynchronization { + + private final ReactiveMongoResourceHolder resourceHolder; + + MongoSessionSynchronization(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoResourceHolder resourceHolder, ReactiveMongoDatabaseFactory dbFactory) { + + super(resourceHolder, dbFactory, synchronizationManager); + this.resourceHolder = resourceHolder; + } + + @Override + protected boolean shouldReleaseBeforeCompletion() { + return false; + } + + @Override + protected Mono processResourceAfterCommit(ReactiveMongoResourceHolder resourceHolder) { + + if (isTransactionActive(resourceHolder)) { + return Mono.from(resourceHolder.getRequiredSession().commitTransaction()); + } + + return Mono.empty(); + } + + @Override + public Mono afterCompletion(int status) { + + return Mono.defer(() -> { + + if (status == TransactionSynchronization.STATUS_ROLLED_BACK && isTransactionActive(this.resourceHolder)) { + + return Mono.from(resourceHolder.getRequiredSession().abortTransaction()) // + .then(super.afterCompletion(status)); + } + + return super.afterCompletion(status); + }); + } + + @Override + protected Mono releaseResource(ReactiveMongoResourceHolder resourceHolder, Object resourceKey) { + + return Mono.fromRunnable(() -> { + if (resourceHolder.hasActiveSession()) { + resourceHolder.getRequiredSession().close(); + } + }); + } + + private boolean isTransactionActive(ReactiveMongoResourceHolder resourceHolder) { + + if (!resourceHolder.hasSession()) { + return false; + } + + return resourceHolder.getRequiredSession().hasActiveTransaction(); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java new file mode 100644 index 0000000000..d01364b202 --- /dev/null +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoResourceHolder.java @@ -0,0 +1,153 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; +import org.springframework.transaction.support.ResourceHolderSupport; + +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * MongoDB specific resource holder, wrapping a {@link ClientSession}. {@link ReactiveMongoTransactionManager} binds + * instances of this class to the subscriber context.
+ * Note: Intended for internal usage only. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 2.2 + * @see ReactiveMongoTransactionManager + * @see ReactiveMongoTemplate + */ +class ReactiveMongoResourceHolder extends ResourceHolderSupport { + + private @Nullable ClientSession session; + private ReactiveMongoDatabaseFactory databaseFactory; + + /** + * Create a new {@link ReactiveMongoResourceHolder} for a given {@link ClientSession session}. + * + * @param session the associated {@link ClientSession}. Can be {@literal null}. + * @param databaseFactory the associated {@link MongoDatabaseFactory}. must not be {@literal null}. + */ + ReactiveMongoResourceHolder(@Nullable ClientSession session, ReactiveMongoDatabaseFactory databaseFactory) { + + this.session = session; + this.databaseFactory = databaseFactory; + } + + /** + * @return the associated {@link ClientSession}. Can be {@literal null}. + */ + @Nullable + ClientSession getSession() { + return session; + } + + /** + * @return the required associated {@link ClientSession}. + * @throws IllegalStateException if no session is associated. + */ + ClientSession getRequiredSession() { + + ClientSession session = getSession(); + + if (session == null) { + throw new IllegalStateException("No ClientSession associated"); + } + return session; + } + + /** + * @return the associated {@link ReactiveMongoDatabaseFactory}. + */ + public ReactiveMongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + /** + * Set the {@link ClientSession} to guard. + * + * @param session can be {@literal null}. + */ + public void setSession(@Nullable ClientSession session) { + this.session = session; + } + + /** + * @return {@literal true} if session is not {@literal null}. 
+ */ + boolean hasSession() { + return session != null; + } + + /** + * If the {@link ReactiveMongoResourceHolder} is {@link #hasSession() not already associated} with a + * {@link ClientSession} the given value is {@link #setSession(ClientSession) set} and returned, otherwise the current + * bound session is returned. + * + * @param session + * @return + */ + public @Nullable ClientSession setSessionIfAbsent(@Nullable ClientSession session) { + + if (!hasSession()) { + setSession(session); + } + + return session; + } + + /** + * @return {@literal true} if the session is active and has not been closed. + */ + boolean hasActiveSession() { + + if (!hasSession()) { + return false; + } + + return hasServerSession() && !getRequiredSession().getServerSession().isClosed(); + } + + /** + * @return {@literal true} if the session has an active transaction. + * @see #hasActiveSession() + */ + boolean hasActiveTransaction() { + + if (!hasActiveSession()) { + return false; + } + + return getRequiredSession().hasActiveTransaction(); + } + + /** + * @return {@literal true} if the {@link ClientSession} has a {@link com.mongodb.session.ServerSession} associated + * that is accessible via {@link ClientSession#getServerSession()}. + */ + boolean hasServerSession() { + + try { + return getRequiredSession().getServerSession() != null; + } catch (IllegalStateException serverSessionClosed) { + // ignore + } + + return false; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java new file mode 100644 index 0000000000..4f293c8ed6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/ReactiveMongoTransactionManager.java @@ -0,0 +1,502 @@ +/* + * Copyright 2019-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import reactor.core.publisher.Mono; + +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.InitializingBean; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.reactive.AbstractReactiveTransactionManager; +import org.springframework.transaction.reactive.GenericReactiveTransaction; +import org.springframework.transaction.reactive.TransactionSynchronizationManager; +import org.springframework.transaction.support.SmartTransactionObject; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.ClientSessionOptions; +import com.mongodb.MongoException; +import com.mongodb.TransactionOptions; +import com.mongodb.reactivestreams.client.ClientSession; + +/** + * A {@link org.springframework.transaction.ReactiveTransactionManager} implementation that manages + * {@link com.mongodb.reactivestreams.client.ClientSession} based transactions for a single + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory}.
+ * Binds a {@link ClientSession} from the specified + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory} to the subscriber + * {@link reactor.util.context.Context}.
+ * {@link org.springframework.transaction.TransactionDefinition#isReadOnly() Readonly} transactions operate on a + * {@link ClientSession} and enable causal consistency, and also {@link ClientSession#startTransaction() start}, + * {@link com.mongodb.reactivestreams.client.ClientSession#commitTransaction() commit} or + * {@link ClientSession#abortTransaction() abort} a transaction.
+ * Application code is required to retrieve the {@link com.mongodb.reactivestreams.client.MongoDatabase} via + * {@link org.springframework.data.mongodb.ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory)} instead + * of a standard {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase()} call. Spring + * classes such as {@link org.springframework.data.mongodb.core.ReactiveMongoTemplate} use this strategy implicitly. + *
+ * By default failure of a {@literal commit} operation raises a {@link TransactionSystemException}. You can override + * {@link #doCommit(TransactionSynchronizationManager, ReactiveMongoTransactionObject)} to implement the + * Retry Commit Operation + * behavior as outlined in the MongoDB reference manual. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see MongoDB Transaction Documentation + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) + */ +public class ReactiveMongoTransactionManager extends AbstractReactiveTransactionManager implements InitializingBean { + + private @Nullable ReactiveMongoDatabaseFactory databaseFactory; + private MongoTransactionOptions options; + private final MongoTransactionOptionsResolver transactionOptionsResolver; + + /** + * Create a new {@link ReactiveMongoTransactionManager} for bean-style usage.
+ * Note: The {@link org.springframework.data.mongodb.ReactiveMongoDatabaseFactory db factory} has to + * be {@link #setDatabaseFactory(ReactiveMongoDatabaseFactory) set} before using the instance. Use this constructor + * to prepare a {@link ReactiveMongoTransactionManager} via a {@link org.springframework.beans.factory.BeanFactory}. + *
+ * Optionally it is possible to set default {@link TransactionOptions transaction options} defining + * {@link com.mongodb.ReadConcern} and {@link com.mongodb.WriteConcern}. + * + * @see #setDatabaseFactory(ReactiveMongoDatabaseFactory) + */ + public ReactiveMongoTransactionManager() { + + this.transactionOptionsResolver = MongoTransactionOptionsResolver.defaultResolver(); + this.options = MongoTransactionOptions.NONE; + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory}. + * + * @param databaseFactory must not be {@literal null}. + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory) { + this(databaseFactory, null); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when + * starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param options can be {@literal null}. Will default {@link MongoTransactionOptions#NONE} if {@literal null}. + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, + @Nullable TransactionOptions options) { + this(databaseFactory, MongoTransactionOptionsResolver.defaultResolver(), MongoTransactionOptions.of(options)); + } + + /** + * Create a new {@link ReactiveMongoTransactionManager} obtaining sessions from the given + * {@link ReactiveMongoDatabaseFactory} applying the given {@link TransactionOptions options}, if present, when + * starting a new transaction. + * + * @param databaseFactory must not be {@literal null}. + * @param transactionOptionsResolver must not be {@literal null}. + * @param defaultTransactionOptions can be {@literal null}. Will default {@link MongoTransactionOptions#NONE} if + * {@literal null}. 
+ * @since 4.3 + */ + public ReactiveMongoTransactionManager(ReactiveMongoDatabaseFactory databaseFactory, + MongoTransactionOptionsResolver transactionOptionsResolver, + @Nullable MongoTransactionOptions defaultTransactionOptions) { + + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); + Assert.notNull(transactionOptionsResolver, "MongoTransactionOptionsResolver must not be null"); + + this.databaseFactory = databaseFactory; + this.transactionOptionsResolver = transactionOptionsResolver; + this.options = defaultTransactionOptions != null ? defaultTransactionOptions : MongoTransactionOptions.NONE; + } + + @Override + protected Object doGetTransaction(TransactionSynchronizationManager synchronizationManager) + throws TransactionException { + + ReactiveMongoResourceHolder resourceHolder = (ReactiveMongoResourceHolder) synchronizationManager + .getResource(getRequiredDatabaseFactory()); + return new ReactiveMongoTransactionObject(resourceHolder); + } + + @Override + protected boolean isExistingTransaction(Object transaction) throws TransactionException { + return extractMongoTransaction(transaction).hasResourceHolder(); + } + + @Override + protected Mono doBegin(TransactionSynchronizationManager synchronizationManager, Object transaction, + TransactionDefinition definition) throws TransactionException { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + + Mono holder = newResourceHolder(definition, + ClientSessionOptions.builder().causallyConsistent(true).build()); + + return holder.doOnNext(resourceHolder -> { + + mongoTransactionObject.setResourceHolder(resourceHolder); + + if (logger.isDebugEnabled()) { + logger.debug( + String.format("About to start transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + }).doOnNext(resourceHolder -> { + + MongoTransactionOptions mongoTransactionOptions = transactionOptionsResolver.resolve(definition) + 
.mergeWith(options); + mongoTransactionObject.startTransaction(mongoTransactionOptions.toDriverOptions()); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("Started transaction for session %s.", debugString(resourceHolder.getSession()))); + } + + })// + .onErrorMap( + ex -> new TransactionSystemException(String.format("Could not start Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex)) + .doOnSuccess(resourceHolder -> { + + synchronizationManager.bindResource(getRequiredDatabaseFactory(), resourceHolder); + }).then(); + }); + } + + @Override + protected Mono doSuspend(TransactionSynchronizationManager synchronizationManager, Object transaction) + throws TransactionException { + + return Mono.fromSupplier(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(transaction); + mongoTransactionObject.setResourceHolder(null); + + return synchronizationManager.unbindResource(getRequiredDatabaseFactory()); + }); + } + + @Override + protected Mono doResume(TransactionSynchronizationManager synchronizationManager, @Nullable Object transaction, + Object suspendedResources) { + return Mono + .fromRunnable(() -> synchronizationManager.bindResource(getRequiredDatabaseFactory(), suspendedResources)); + } + + @Override + protected final Mono doCommit(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) throws TransactionException { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to commit transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + return doCommit(synchronizationManager, mongoTransactionObject).onErrorMap(ex -> { + return new TransactionSystemException(String.format("Could not commit Mongo transaction for session %s.", + 
debugString(mongoTransactionObject.getSession())), ex); + }); + }); + } + + /** + * Customization hook to perform an actual commit of the given transaction.
+ * If a commit operation encounters an error, the MongoDB driver throws a {@link MongoException} holding + * {@literal error labels}.
+ * By default those labels are ignored, nevertheless one might check for + * {@link MongoException#UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL transient commit errors labels} and retry the the + * commit. + * + * @param synchronizationManager reactive synchronization manager. + * @param transactionObject never {@literal null}. + */ + protected Mono doCommit(TransactionSynchronizationManager synchronizationManager, + ReactiveMongoTransactionObject transactionObject) { + return transactionObject.commitTransaction(); + } + + @Override + protected Mono doRollback(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) { + + return Mono.defer(() -> { + + ReactiveMongoTransactionObject mongoTransactionObject = extractMongoTransaction(status); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to abort transaction for session %s.", + debugString(mongoTransactionObject.getSession()))); + } + + return mongoTransactionObject.abortTransaction().onErrorResume(MongoException.class, ex -> { + return Mono + .error(new TransactionSystemException(String.format("Could not abort Mongo transaction for session %s.", + debugString(mongoTransactionObject.getSession())), ex)); + }); + }); + } + + @Override + protected Mono doSetRollbackOnly(TransactionSynchronizationManager synchronizationManager, + GenericReactiveTransaction status) throws TransactionException { + + return Mono.fromRunnable(() -> { + ReactiveMongoTransactionObject transactionObject = extractMongoTransaction(status); + transactionObject.getRequiredResourceHolder().setRollbackOnly(); + }); + } + + @Override + protected Mono doCleanupAfterCompletion(TransactionSynchronizationManager synchronizationManager, + Object transaction) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + transaction.getClass())); + + return 
Mono.fromRunnable(() -> { + ReactiveMongoTransactionObject mongoTransactionObject = (ReactiveMongoTransactionObject) transaction; + + // Remove the connection holder from the thread. + synchronizationManager.unbindResource(getRequiredDatabaseFactory()); + mongoTransactionObject.getRequiredResourceHolder().clear(); + + if (logger.isDebugEnabled()) { + logger.debug(String.format("About to release Session %s after transaction.", + debugString(mongoTransactionObject.getSession()))); + } + + mongoTransactionObject.closeSession(); + }); + } + + /** + * Set the {@link ReactiveMongoDatabaseFactory} that this instance should manage transactions for. + * + * @param databaseFactory must not be {@literal null}. + */ + public void setDatabaseFactory(ReactiveMongoDatabaseFactory databaseFactory) { + + Assert.notNull(databaseFactory, "DatabaseFactory must not be null"); + this.databaseFactory = databaseFactory; + } + + /** + * Set the {@link TransactionOptions} to be applied when starting transactions. + * + * @param options can be {@literal null}. + */ + public void setOptions(@Nullable TransactionOptions options) { + this.options = MongoTransactionOptions.of(options); + } + + /** + * Get the {@link ReactiveMongoDatabaseFactory} that this instance manages transactions for. + * + * @return can be {@literal null}. + */ + public @Nullable ReactiveMongoDatabaseFactory getDatabaseFactory() { + return databaseFactory; + } + + @Override + public void afterPropertiesSet() { + getRequiredDatabaseFactory(); + } + + private Mono newResourceHolder(TransactionDefinition definition, + ClientSessionOptions options) { + + ReactiveMongoDatabaseFactory dbFactory = getRequiredDatabaseFactory(); + + return dbFactory.getSession(options).map(session -> new ReactiveMongoResourceHolder(session, dbFactory)); + } + + /** + * @throws IllegalStateException if {@link #databaseFactory} is {@literal null}. 
+ */ + private ReactiveMongoDatabaseFactory getRequiredDatabaseFactory() { + + Assert.state(databaseFactory != null, + "ReactiveMongoTransactionManager operates upon a ReactiveMongoDatabaseFactory; Did you forget to provide one; It's required"); + + return databaseFactory; + } + + private static ReactiveMongoTransactionObject extractMongoTransaction(Object transaction) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, transaction, + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + transaction.getClass())); + + return (ReactiveMongoTransactionObject) transaction; + } + + private static ReactiveMongoTransactionObject extractMongoTransaction(GenericReactiveTransaction status) { + + Assert.isInstanceOf(ReactiveMongoTransactionObject.class, status.getTransaction(), + () -> String.format("Expected to find a %s but it turned out to be %s.", ReactiveMongoTransactionObject.class, + status.getTransaction().getClass())); + + return (ReactiveMongoTransactionObject) status.getTransaction(); + } + + private static String debugString(@Nullable ClientSession session) { + + if (session == null) { + return "null"; + } + + String debugString = String.format("[%s@%s ", ClassUtils.getShortName(session.getClass()), + Integer.toHexString(session.hashCode())); + + try { + if (session.getServerSession() != null) { + debugString += String.format("id = %s, ", session.getServerSession().getIdentifier()); + debugString += String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("txNumber = %d, ", session.getServerSession().getTransactionNumber()); + debugString += String.format("closed = %b, ", session.getServerSession().isClosed()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } else { + debugString += "id = n/a"; + debugString += 
String.format("causallyConsistent = %s, ", session.isCausallyConsistent()); + debugString += String.format("txActive = %s, ", session.hasActiveTransaction()); + debugString += String.format("clusterTime = %s", session.getClusterTime()); + } + } catch (RuntimeException e) { + debugString += String.format("error = %s", e.getMessage()); + } + + debugString += "]"; + + return debugString; + } + + /** + * MongoDB specific transaction object, representing a {@link MongoResourceHolder}. Used as transaction object by + * {@link ReactiveMongoTransactionManager}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see ReactiveMongoResourceHolder + */ + protected static class ReactiveMongoTransactionObject implements SmartTransactionObject { + + private @Nullable ReactiveMongoResourceHolder resourceHolder; + + ReactiveMongoTransactionObject(@Nullable ReactiveMongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * Set the {@link MongoResourceHolder}. + * + * @param resourceHolder can be {@literal null}. + */ + void setResourceHolder(@Nullable ReactiveMongoResourceHolder resourceHolder) { + this.resourceHolder = resourceHolder; + } + + /** + * @return {@literal true} if a {@link MongoResourceHolder} is set. + */ + final boolean hasResourceHolder() { + return resourceHolder != null; + } + + /** + * Start a MongoDB transaction optionally given {@link TransactionOptions}. + * + * @param options can be {@literal null} + */ + void startTransaction(@Nullable TransactionOptions options) { + + ClientSession session = getRequiredSession(); + if (options != null) { + session.startTransaction(options); + } else { + session.startTransaction(); + } + } + + /** + * Commit the transaction. + */ + public Mono commitTransaction() { + return Mono.from(getRequiredSession().commitTransaction()); + } + + /** + * Rollback (abort) the transaction. 
+ */ + public Mono abortTransaction() { + return Mono.from(getRequiredSession().abortTransaction()); + } + + /** + * Close a {@link ClientSession} without regard to its transactional state. + */ + void closeSession() { + + ClientSession session = getRequiredSession(); + if (session.getServerSession() != null && !session.getServerSession().isClosed()) { + session.close(); + } + } + + public @Nullable ClientSession getSession() { + return resourceHolder != null ? resourceHolder.getSession() : null; + } + + private ReactiveMongoResourceHolder getRequiredResourceHolder() { + + Assert.state(resourceHolder != null, "ReactiveMongoResourceHolder is required but not present; o_O"); + return resourceHolder; + } + + private ClientSession getRequiredSession() { + + ClientSession session = getSession(); + Assert.state(session != null, "A Session is required but it turned out to be null"); + return session; + } + + @Override + public boolean isRollbackOnly() { + return this.resourceHolder != null && this.resourceHolder.isRollbackOnly(); + } + + @Override + public void flush() { + throw new UnsupportedOperationException("flush() not supported"); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java index ffa75657eb..ec30478a54 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionAwareMethodInterceptor.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,8 +22,8 @@ import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; +import org.jspecify.annotations.Nullable; import org.springframework.core.MethodClassKey; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.ConcurrentReferenceHashMap; @@ -34,8 +34,7 @@ /** * {@link MethodInterceptor} implementation looking up and invoking an alternative target method having - * {@link ClientSession} as its first argument. This allows seamless integration with the existing code base. - *

+ * {@link ClientSession} as its first argument. This allows seamless integration with the existing code base.
* The {@link MethodInterceptor} is aware of methods on {@code MongoCollection} that my return new instances of itself * like (eg. {@link com.mongodb.reactivestreams.client.MongoCollection#withWriteConcern(WriteConcern)} and decorate them * if not already proxied. @@ -76,13 +75,13 @@ public SessionAwareMethodInterceptor(ClientSession session, T target, Class< Class databaseType, ClientSessionOperator databaseDecorator, Class collectionType, ClientSessionOperator collectionDecorator) { - Assert.notNull(session, "ClientSession must not be null!"); - Assert.notNull(target, "Target must not be null!"); - Assert.notNull(sessionType, "SessionType must not be null!"); - Assert.notNull(databaseType, "Database type must not be null!"); - Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null!"); - Assert.notNull(collectionType, "Collection type must not be null!"); - Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null!"); + Assert.notNull(session, "ClientSession must not be null"); + Assert.notNull(target, "Target must not be null"); + Assert.notNull(sessionType, "SessionType must not be null"); + Assert.notNull(databaseType, "Database type must not be null"); + Assert.notNull(databaseDecorator, "Database ClientSessionOperator must not be null"); + Assert.notNull(collectionType, "Collection type must not be null"); + Assert.notNull(collectionDecorator, "Collection ClientSessionOperator must not be null"); this.session = session; this.target = target; @@ -95,17 +94,13 @@ public SessionAwareMethodInterceptor(ClientSession session, T target, Class< this.sessionType = sessionType; } - /* - * (non-Javadoc) - * @see org.aopalliance.intercept.MethodInterceptor(org.aopalliance.intercept.MethodInvocation) - */ - @Nullable @Override - public Object invoke(MethodInvocation methodInvocation) throws Throwable { + public @Nullable Object invoke(MethodInvocation methodInvocation) throws Throwable { if 
(requiresDecoration(methodInvocation.getMethod())) { Object target = methodInvocation.proceed(); + Assert.notNull(target, "invocation target was null"); if (target instanceof Proxy) { return target; } @@ -139,12 +134,8 @@ protected Object decorate(Object target) { private static boolean requiresSession(Method method) { - if (method.getParameterCount() == 0 - || !ClassUtils.isAssignable(ClientSession.class, method.getParameterTypes()[0])) { - return true; - } - - return false; + return method.getParameterCount() == 0 + || !ClassUtils.isAssignable(ClientSession.class, method.getParameterTypes()[0]); } private static Object[] prependSessionToArguments(ClientSession session, MethodInvocation invocation) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java index 225b79508a..07b5c31586 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SessionSynchronization.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,20 @@ */ package org.springframework.data.mongodb; +import org.springframework.data.mongodb.core.MongoTemplate; +import org.springframework.data.mongodb.core.ReactiveMongoTemplate; + /** - * {@link SessionSynchronization} is used along with {@link org.springframework.data.mongodb.core.MongoTemplate} to - * define in which type of transactions to participate if any. + * {@link SessionSynchronization} is used along with {@code MongoTemplate} to define in which type of transactions to + * participate if any. * * @author Christoph Strobl * @author Mark Paluch * @since 2.1 + * @see MongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see MongoDatabaseUtils#getDatabase(MongoDatabaseFactory, SessionSynchronization) + * @see ReactiveMongoTemplate#setSessionSynchronization(SessionSynchronization) + * @see ReactiveMongoDatabaseUtils#getDatabase(ReactiveMongoDatabaseFactory, SessionSynchronization) */ public enum SessionSynchronization { @@ -34,5 +41,12 @@ public enum SessionSynchronization { /** * Synchronize with native MongoDB transactions initiated via {@link MongoTransactionManager}. */ - ON_ACTUAL_TRANSACTION; + ON_ACTUAL_TRANSACTION, + + /** + * Do not participate in ongoing transactions. 
+ * + * @since 3.2.5 + */ + NEVER } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java new file mode 100644 index 0000000000..5c50ba686a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SimpleMongoTransactionOptions.java @@ -0,0 +1,146 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; +import java.util.Arrays; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.Function; +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; +import com.mongodb.ReadPreference; +import com.mongodb.WriteConcern; + +/** + * Trivial implementation of {@link MongoTransactionOptions}. 
+ * + * @author Christoph Strobl + * @since 4.3 + */ +class SimpleMongoTransactionOptions implements MongoTransactionOptions { + + static final Set KNOWN_KEYS = Arrays.stream(OptionKey.values()).map(OptionKey::getKey) + .collect(Collectors.toSet()); + + private final @Nullable Duration maxCommitTime; + private final @Nullable ReadConcern readConcern; + private final @Nullable ReadPreference readPreference; + private final @Nullable WriteConcern writeConcern; + + static SimpleMongoTransactionOptions of(Map options) { + return new SimpleMongoTransactionOptions(options); + } + + private SimpleMongoTransactionOptions(Map options) { + + this.maxCommitTime = doGetMaxCommitTime(options); + this.readConcern = doGetReadConcern(options); + this.readPreference = doGetReadPreference(options); + this.writeConcern = doGetWriteConcern(options); + } + + @Override + public @Nullable Duration getMaxCommitTime() { + return maxCommitTime; + } + + @Override + public @Nullable ReadConcern getReadConcern() { + return readConcern; + } + + @Override + public @Nullable ReadPreference getReadPreference() { + return readPreference; + } + + @Override + public @Nullable WriteConcern getWriteConcern() { + return writeConcern; + } + + @Override + public String toString() { + + return "DefaultMongoTransactionOptions{" + "maxCommitTime=" + maxCommitTime + ", readConcern=" + readConcern + + ", readPreference=" + readPreference + ", writeConcern=" + writeConcern + '}'; + } + + private static @Nullable Duration doGetMaxCommitTime(Map options) { + + return getValue(options, OptionKey.MAX_COMMIT_TIME, value -> { + + Duration timeout = Duration.parse(value); + Assert.isTrue(!timeout.isNegative(), "%s cannot be negative".formatted(OptionKey.MAX_COMMIT_TIME)); + return timeout; + }); + } + + private static @Nullable ReadConcern doGetReadConcern(Map options) { + return getValue(options, OptionKey.READ_CONCERN, value -> new ReadConcern(ReadConcernLevel.fromString(value))); + } + + private static @Nullable 
ReadPreference doGetReadPreference(Map options) { + return getValue(options, OptionKey.READ_PREFERENCE, ReadPreference::valueOf); + } + + private static @Nullable WriteConcern doGetWriteConcern(Map options) { + + return getValue(options, OptionKey.WRITE_CONCERN, value -> { + + WriteConcern writeConcern = WriteConcern.valueOf(value); + if (writeConcern == null) { + throw new IllegalArgumentException("'%s' is not a valid WriteConcern".formatted(options.get("writeConcern"))); + } + return writeConcern; + }); + } + + private static @Nullable T getValue(Map options, OptionKey key, + Function convertFunction) { + + String value = options.get(key.getKey()); + return value != null ? convertFunction.apply(value) : null; + } + + enum OptionKey { + + MAX_COMMIT_TIME("maxCommitTime"), READ_CONCERN("readConcern"), READ_PREFERENCE("readPreference"), WRITE_CONCERN( + "writeConcern"); + + final String key; + + OptionKey(String key) { + this.key = key; + } + + public String getKey() { + return key; + } + + @Override + public String toString() { + return getKey(); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java new file mode 100644 index 0000000000..a3d600270f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/SpringDataMongoDB.java @@ -0,0 +1,77 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.springframework.data.util.Version; +import org.springframework.util.StringUtils; + +import com.mongodb.MongoDriverInformation; + +/** + * Class that exposes the SpringData MongoDB specific information like the current {@link Version} or + * {@link MongoDriverInformation driver information}. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class SpringDataMongoDB { + + private static final Log LOGGER = LogFactory.getLog(SpringDataMongoDB.class); + + private static final Version FALLBACK_VERSION = new Version(3); + private static final MongoDriverInformation DRIVER_INFORMATION = MongoDriverInformation + .builder(MongoDriverInformation.builder().build()).driverName("spring-data").build(); + + /** + * Obtain the SpringData MongoDB specific driver information. + * + * @return never {@literal null}. + */ + public static MongoDriverInformation driverInformation() { + return DRIVER_INFORMATION; + } + + /** + * Fetches the "Implementation-Version" manifest attribute from the jar file. + *
+ * Note that some ClassLoaders do not expose the package metadata, hence this class might not be able to determine the + * version in all environments. In this case the current Major version is returned as a fallback. + * + * @return never {@literal null}. + */ + public static Version version() { + + Package pkg = SpringDataMongoDB.class.getPackage(); + String versionString = (pkg != null ? pkg.getImplementationVersion() : null); + + if (!StringUtils.hasText(versionString)) { + + LOGGER.debug("Unable to find Spring Data MongoDB version."); + return FALLBACK_VERSION; + } + + try { + return Version.parse(versionString); + } catch (Exception e) { + LOGGER.debug(String.format("Cannot read Spring Data MongoDB version '%s'.", versionString)); + } + + return FALLBACK_VERSION; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java new file mode 100644 index 0000000000..57ecec0342 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionMetadata.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import java.time.Duration; + +import org.jspecify.annotations.Nullable; + +/** + * MongoDB-specific transaction metadata. 
+ * + * @author Christoph Strobl + * @since 4.3 + */ +public interface TransactionMetadata { + + /** + * @return the maximum commit time. Can be {@literal null} if not configured. + */ + @Nullable + Duration getMaxCommitTime(); + + /** + * @return {@literal true} if the max commit time is configured; {@literal false} otherwise. + */ + default boolean hasMaxCommitTime() { + return getMaxCommitTime() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java new file mode 100644 index 0000000000..e42c26d95a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransactionOptionResolver.java @@ -0,0 +1,38 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.jspecify.annotations.Nullable; +import org.springframework.transaction.TransactionDefinition; + +/** + * Interface that defines a resolver for {@link TransactionMetadata} based on a {@link TransactionDefinition}. + * Transaction metadata is used to enrich the MongoDB transaction with additional information. + * + * @author Christoph Strobl + * @since 4.3 + */ +interface TransactionOptionResolver { + + /** + * Resolves the transaction metadata from a given {@link TransactionDefinition}. 
+ * + * @param definition the {@link TransactionDefinition}. + * @return the resolved {@link TransactionMetadata} or {@literal null} if the resolver cannot resolve any metadata. + */ + @Nullable + T resolve(TransactionDefinition definition); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java new file mode 100644 index 0000000000..5446170ff9 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientClientSessionException.java @@ -0,0 +1,38 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.TransientDataAccessException; + +/** + * {@link TransientDataAccessException} specific to MongoDB {@link com.mongodb.session.ClientSession} related data + * access failures such as reading data using an already closed session. + * + * @author Christoph Strobl + * @since 4.4 + */ +public class TransientClientSessionException extends TransientMongoDbException { + + /** + * Constructor for {@link TransientClientSessionException}. + * + * @param msg the detail message. + * @param cause the root cause. 
+ */ + public TransientClientSessionException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java new file mode 100644 index 0000000000..cad05ca17c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/TransientMongoDbException.java @@ -0,0 +1,39 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb; + +import org.springframework.dao.TransientDataAccessException; + +/** + * Root of the hierarchy of MongoDB specific data access exceptions that are considered transient such as + * {@link com.mongodb.MongoException MongoExceptions} carrying {@link com.mongodb.MongoException#hasErrorLabel(String) + * specific labels}. + * + * @author Christoph Strobl + * @since 4.4 + */ +public class TransientMongoDbException extends TransientDataAccessException { + + /** + * Constructor for {@link TransientMongoDbException}. + * + * @param msg the detail message. + * @param cause the root cause. 
+ */ + public TransientMongoDbException(String msg, Throwable cause) { + super(msg, cause); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java index 1fb5868407..69ec086e5a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/UncategorizedMongoDbException.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,14 @@ */ package org.springframework.data.mongodb; +import org.jspecify.annotations.Nullable; import org.springframework.dao.UncategorizedDataAccessException; public class UncategorizedMongoDbException extends UncategorizedDataAccessException { private static final long serialVersionUID = -2336595514062364929L; - public UncategorizedMongoDbException(String msg, Throwable cause) { + public UncategorizedMongoDbException(String msg, @Nullable Throwable cause) { super(msg, cause); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java new file mode 100644 index 0000000000..2254b3c9a8 --- /dev/null +++ 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/LazyLoadingProxyAotProcessor.java @@ -0,0 +1,105 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import java.lang.annotation.Annotation; +import java.lang.reflect.Field; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Set; + +import org.springframework.aot.generate.GenerationContext; +import org.springframework.aot.hint.TypeReference; +import org.springframework.core.annotation.AnnotatedElementUtils; +import org.springframework.core.annotation.MergedAnnotations; +import org.springframework.data.annotation.Reference; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory; +import org.springframework.data.mongodb.core.convert.LazyLoadingProxyFactory.LazyLoadingInterceptor; +import org.springframework.data.mongodb.core.mapping.DBRef; +import org.springframework.data.mongodb.core.mapping.DocumentReference; + +/** + * @author Christoph Strobl + * @since 4.0 + */ +public class LazyLoadingProxyAotProcessor { + + private boolean generalLazyLoadingProxyContributed = false; + + public void registerLazyLoadingProxyIfNeeded(Class type, GenerationContext generationContext) { + + Set refFields = getFieldsWithAnnotationPresent(type, Reference.class); + if (refFields.isEmpty()) { + 
return; + } + + refFields.stream() // + .filter(LazyLoadingProxyAotProcessor::isLazyLoading) // + .forEach(field -> { + + if (!generalLazyLoadingProxyContributed) { + generationContext.getRuntimeHints().proxies().registerJdkProxy( + TypeReference.of(org.springframework.data.mongodb.core.convert.LazyLoadingProxy.class), + TypeReference.of(org.springframework.aop.SpringProxy.class), + TypeReference.of(org.springframework.aop.framework.Advised.class), + TypeReference.of(org.springframework.core.DecoratingProxy.class)); + generalLazyLoadingProxyContributed = true; + } + + if (field.getType().isInterface()) { + + List> interfaces = new ArrayList<>( + Arrays.asList(LazyLoadingProxyFactory.prepareFactory(field.getType()).getProxiedInterfaces())); + interfaces.add(org.springframework.aop.SpringProxy.class); + interfaces.add(org.springframework.aop.framework.Advised.class); + interfaces.add(org.springframework.core.DecoratingProxy.class); + + generationContext.getRuntimeHints().proxies().registerJdkProxy(interfaces.toArray(Class[]::new)); + } else { + + Class proxyClass = LazyLoadingProxyFactory.resolveProxyType(field.getType(), + LazyLoadingInterceptor::none); + + // see: spring-projects/spring-framework/issues/29309 + generationContext.getRuntimeHints().reflection().registerType(proxyClass, MongoAotReflectionHelper::cglibProxyReflectionMemberAccess); + } + }); + } + + private static boolean isLazyLoading(Field field) { + if (AnnotatedElementUtils.isAnnotated(field, DBRef.class)) { + return AnnotatedElementUtils.findMergedAnnotation(field, DBRef.class).lazy(); + } + if (AnnotatedElementUtils.isAnnotated(field, DocumentReference.class)) { + return AnnotatedElementUtils.findMergedAnnotation(field, DocumentReference.class).lazy(); + } + return false; + } + + private static Set getFieldsWithAnnotationPresent(Class type, Class annotation) { + + Set fields = new LinkedHashSet<>(); + for (Field field : type.getDeclaredFields()) { + if 
(MergedAnnotations.from(field).get(annotation).isPresent()) { + fields.add(field); + } + } + return fields; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java new file mode 100644 index 0000000000..86a70600a8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotPredicates.java @@ -0,0 +1,68 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import java.util.function.Predicate; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.util.ReactiveWrappers; +import org.springframework.data.util.ReactiveWrappers.ReactiveLibrary; +import org.springframework.data.util.TypeUtils; +import org.springframework.util.ClassUtils; + +/** + * Collection of {@link Predicate predicates} to determine dynamic library aspects during AOT computation. Intended for + * internal usage only. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ +public class MongoAotPredicates { + + public static final Predicate> IS_SIMPLE_TYPE = (type) -> MongoSimpleTypes.HOLDER.isSimpleType(type) + || TypeUtils.type(type).isPartOf("org.bson"); + public static final Predicate IS_REACTIVE_LIBARARY_AVAILABLE = ReactiveWrappers::isAvailable; + public static final Predicate IS_SYNC_CLIENT_PRESENT = (classLoader) -> ClassUtils + .isPresent("com.mongodb.client.MongoClient", classLoader); + public static final Predicate IS_REACTIVE_CLIENT_PRESENT = (classLoader) -> ClassUtils + .isPresent("com.mongodb.reactivestreams.client.MongoClient", classLoader); + + /** + * @return {@literal true} if the Project Reactor is present. + */ + public static boolean isReactorPresent() { + return IS_REACTIVE_LIBARARY_AVAILABLE.test(ReactiveWrappers.ReactiveLibrary.PROJECT_REACTOR); + } + + /** + * @param classLoader can be {@literal null}. + * @return {@literal true} if the {@link com.mongodb.client.MongoClient} is present. + * @since 4.0 + */ + public static boolean isSyncClientPresent(@Nullable ClassLoader classLoader) { + return IS_SYNC_CLIENT_PRESENT.test(classLoader); + } + + /** + * @param classLoader can be {@literal null}. + * @return {@literal true} if the {@link com.mongodb.reactivestreams.client.MongoClient} is present. + * @since 4.3 + */ + public static boolean isReactiveClientPresent(@Nullable ClassLoader classLoader) { + return IS_REACTIVE_CLIENT_PRESENT.test(classLoader); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java new file mode 100644 index 0000000000..ff8d04b382 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoAotReflectionHelper.java @@ -0,0 +1,31 @@ +/* + * Copyright 2024-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.TypeHint.Builder; + +/** + * @author Christoph Strobl + */ +public final class MongoAotReflectionHelper { + + public static void cglibProxyReflectionMemberAccess(Builder builder) { + + builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, MemberCategory.INVOKE_DECLARED_METHODS, + MemberCategory.DECLARED_FIELDS); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java new file mode 100644 index 0000000000..4b7aa10c3f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoManagedTypesBeanRegistrationAotProcessor.java @@ -0,0 +1,56 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import org.jspecify.annotations.Nullable; +import org.springframework.aot.generate.GenerationContext; +import org.springframework.core.ResolvableType; +import org.springframework.data.aot.ManagedTypesBeanRegistrationAotProcessor; +import org.springframework.data.mongodb.MongoManagedTypes; +import org.springframework.util.ClassUtils; + +/** + * @author Christoph Strobl + * @since 2022/06 + */ +class MongoManagedTypesBeanRegistrationAotProcessor extends ManagedTypesBeanRegistrationAotProcessor { + + private final LazyLoadingProxyAotProcessor lazyLoadingProxyAotProcessor = new LazyLoadingProxyAotProcessor(); + + public MongoManagedTypesBeanRegistrationAotProcessor() { + setModuleIdentifier("mongo"); + } + + @Override + protected boolean isMatch(@Nullable Class beanType, @Nullable String beanName) { + return isMongoManagedTypes(beanType) || super.isMatch(beanType, beanName); + } + + protected boolean isMongoManagedTypes(@Nullable Class beanType) { + return beanType != null && ClassUtils.isAssignable(MongoManagedTypes.class, beanType); + } + + @Override + protected void contributeType(ResolvableType type, GenerationContext generationContext) { + + if (MongoAotPredicates.IS_SIMPLE_TYPE.test(type.toClass())) { + return; + } + + super.contributeType(type, generationContext); + lazyLoadingProxyAotProcessor.registerLazyLoadingProxyIfNeeded(type.toClass(), generationContext); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java new file mode 100644 index 0000000000..f2442960ed --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/aot/MongoRuntimeHints.java @@ -0,0 +1,129 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.aot; + +import static org.springframework.data.mongodb.aot.MongoAotPredicates.isReactorPresent; + +import java.util.Arrays; + +import org.jspecify.annotations.Nullable; +import org.springframework.aot.hint.MemberCategory; +import org.springframework.aot.hint.RuntimeHints; +import org.springframework.aot.hint.RuntimeHintsRegistrar; +import org.springframework.aot.hint.TypeReference; +import org.springframework.data.mongodb.core.mapping.event.AfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import 
org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.util.ClassUtils; + +import com.mongodb.MongoClientSettings; +import com.mongodb.ServerAddress; +import com.mongodb.UnixServerAddress; +import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoDatabase; +import com.mongodb.client.model.IndexOptions; +import com.mongodb.reactivestreams.client.MapReducePublisher; + +/** + * {@link RuntimeHintsRegistrar} for repository types and entity callbacks. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.0 + */ +class MongoRuntimeHints implements RuntimeHintsRegistrar { + + @Override + public void registerHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection().registerTypes( + Arrays.asList(TypeReference.of(BeforeConvertCallback.class), TypeReference.of(BeforeSaveCallback.class), + TypeReference.of(AfterConvertCallback.class), TypeReference.of(AfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + + registerTransactionProxyHints(hints, classLoader); + registerMongoCompatibilityAdapterHints(hints, classLoader); + + if (isReactorPresent()) { + + hints.reflection() + .registerTypes(Arrays.asList(TypeReference.of(ReactiveBeforeConvertCallback.class), + TypeReference.of(ReactiveBeforeSaveCallback.class), TypeReference.of(ReactiveAfterConvertCallback.class), + TypeReference.of(ReactiveAfterSaveCallback.class)), + builder -> builder.withMembers(MemberCategory.INVOKE_DECLARED_CONSTRUCTORS, + MemberCategory.INVOKE_PUBLIC_METHODS)); + } + } + + private static void registerTransactionProxyHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + if (MongoAotPredicates.isSyncClientPresent(classLoader) + && ClassUtils.isPresent("org.springframework.aop.SpringProxy", classLoader)) { + + 
hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoDatabase"), + TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + hints.proxies().registerJdkProxy(TypeReference.of("com.mongodb.client.MongoCollection"), + TypeReference.of("org.springframework.aop.SpringProxy"), + TypeReference.of("org.springframework.core.DecoratingProxy")); + } + } + + @SuppressWarnings("deprecation") + private static void registerMongoCompatibilityAdapterHints(RuntimeHints hints, @Nullable ClassLoader classLoader) { + + hints.reflection() // + .registerType(MongoClientSettings.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MongoClientSettings.Builder.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(IndexOptions.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(ServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(UnixServerAddress.class, MemberCategory.INVOKE_PUBLIC_METHODS) // + .registerType(TypeReference.of("com.mongodb.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.connection.StreamFactoryFactory"), + MemberCategory.INTROSPECT_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.internal.build.MongoDriverVersion"), MemberCategory.PUBLIC_FIELDS); + + if (MongoAotPredicates.isSyncClientPresent(classLoader)) { + + hints.reflection() // + .registerType(MongoDatabase.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReduceIterable.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.client.internal.MapReduceIterableImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + + if (MongoAotPredicates.isReactiveClientPresent(classLoader)) { + + hints.reflection() // + 
.registerType(com.mongodb.reactivestreams.client.MongoDatabase.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MongoDatabaseImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(MapReducePublisher.class, MemberCategory.INVOKE_PUBLIC_METHODS) + .registerType(TypeReference.of("com.mongodb.reactivestreams.client.internal.MapReducePublisherImpl"), + MemberCategory.INVOKE_PUBLIC_METHODS); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java index 2daadb7caf..93033417fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoClientConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,17 +17,20 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.SpringDataMongoDB; import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoClientDbFactory; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.data.mongodb.core.convert.DbRefResolver; import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; /** * Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.client.MongoClient}. @@ -35,77 +38,74 @@ * @author Christoph Strobl * @since 2.1 * @see MongoConfigurationSupport - * @see AbstractMongoConfiguration */ -@Configuration +@Configuration(proxyBeanMethods = false) public abstract class AbstractMongoClientConfiguration extends MongoConfigurationSupport { /** * Return the {@link MongoClient} instance to connect to. 
Annotate with {@link Bean} in case you want to expose a - * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. + * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
+ * Override {@link #mongoClientSettings()} to configure connection details. * - * @return + * @return never {@literal null}. + * @see #mongoClientSettings() + * @see #configureClientSettings(Builder) */ - public abstract MongoClient mongoClient(); + public MongoClient mongoClient() { + return createMongoClient(mongoClientSettings()); + } /** * Creates a {@link MongoTemplate}. * - * @return + * @see #mongoDbFactory() + * @see #mappingMongoConverter(MongoDatabaseFactory, MongoCustomConversions, MongoMappingContext) */ @Bean - public MongoTemplate mongoTemplate() throws Exception { - return new MongoTemplate(mongoDbFactory(), mappingMongoConverter()); + public MongoTemplate mongoTemplate(MongoDatabaseFactory databaseFactory, MappingMongoConverter converter) { + return new MongoTemplate(databaseFactory, converter); } /** - * Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient} - * instance configured in {@link #mongoClient()}. + * Creates a {@link org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory} to be used by the + * {@link MongoTemplate}. Will use the {@link MongoClient} instance configured in {@link #mongoClient()}. * * @see #mongoClient() - * @see #mongoTemplate() - * @return + * @see #mongoTemplate(MongoDatabaseFactory, MappingMongoConverter) */ @Bean - public MongoDbFactory mongoDbFactory() { - return new SimpleMongoClientDbFactory(mongoClient(), getDatabaseName()); - } - - /** - * Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration - * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending - * {@link AbstractMongoClientConfiguration} the base package will be considered {@code com.acme} unless the method is - * overridden to implement alternate behavior. 
- * - * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for - * entities. - * @deprecated use {@link #getMappingBasePackages()} instead. - */ - @Deprecated - @Nullable - protected String getMappingBasePackage() { - - Package mappingBasePackage = getClass().getPackage(); - return mappingBasePackage == null ? null : mappingBasePackage.getName(); + public MongoDatabaseFactory mongoDbFactory() { + return new SimpleMongoClientDatabaseFactory(mongoClient(), getDatabaseName()); } /** * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and - * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. + * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied. * * @see #customConversions() - * @see #mongoMappingContext() + * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes) * @see #mongoDbFactory() - * @return - * @throws Exception */ @Bean - public MappingMongoConverter mappingMongoConverter() throws Exception { + public MappingMongoConverter mappingMongoConverter(MongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory()); - MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext()); - converter.setCustomConversions(customConversions()); + DbRefResolver dbRefResolver = new DefaultDbRefResolver(databaseFactory); + MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); + converter.setCustomConversions(customConversions); + converter.setCodecRegistryProvider(databaseFactory); return converter; } + + /** + * Create the {@link MongoClient} instance with 
given + * {@link MongoClientSettings}. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClient createMongoClient(MongoClientSettings settings) { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java deleted file mode 100644 index b4b8d47170..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractMongoConfiguration.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.config; - -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.data.mongodb.core.MongoTemplate; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; -import org.springframework.data.mongodb.core.convert.DbRefResolver; -import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.Document; -import org.springframework.lang.Nullable; - -import com.mongodb.MongoClient; - -/** - * Base class for Spring Data MongoDB configuration using JavaConfig with {@link com.mongodb.MongoClient}. - *

- * INFO:In case you want to use {@link com.mongodb.client.MongoClients} for configuration please refer - * to {@link AbstractMongoClientConfiguration}. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Ryan Tenney - * @author Christoph Strobl - * @author Mark Paluch - * @see MongoConfigurationSupport - * @see AbstractMongoClientConfiguration - */ -@Configuration -public abstract class AbstractMongoConfiguration extends MongoConfigurationSupport { - - /** - * Return the {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want to expose a - * {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. - * - * @return - */ - public abstract MongoClient mongoClient(); - - /** - * Creates a {@link MongoTemplate}. - * - * @return - */ - @Bean - public MongoTemplate mongoTemplate() throws Exception { - return new MongoTemplate(mongoDbFactory(), mappingMongoConverter()); - } - - /** - * Creates a {@link SimpleMongoDbFactory} to be used by the {@link MongoTemplate}. Will use the {@link MongoClient} - * instance configured in {@link #mongoClient()}. - * - * @see #mongoClient() - * @see #mongoTemplate() - * @return - */ - @Bean - public MongoDbFactory mongoDbFactory() { - return new SimpleMongoDbFactory(mongoClient(), getDatabaseName()); - } - - /** - * Return the base package to scan for mapped {@link Document}s. Will return the package name of the configuration - * class' (the concrete class, not this one here) by default. So if you have a {@code com.acme.AppConfig} extending - * {@link AbstractMongoConfiguration} the base package will be considered {@code com.acme} unless the method is - * overridden to implement alternate behavior. - * - * @return the base package to scan for mapped {@link Document} classes or {@literal null} to not enable scanning for - * entities. - * @deprecated use {@link #getMappingBasePackages()} instead. 
- */ - @Deprecated - @Nullable - protected String getMappingBasePackage() { - - Package mappingBasePackage = getClass().getPackage(); - return mappingBasePackage == null ? null : mappingBasePackage.getName(); - } - - /** - * Creates a {@link MappingMongoConverter} using the configured {@link #mongoDbFactory()} and - * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. - * - * @see #customConversions() - * @see #mongoMappingContext() - * @see #mongoDbFactory() - * @return - * @throws Exception - */ - @Bean - public MappingMongoConverter mappingMongoConverter() throws Exception { - - DbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory()); - MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mongoMappingContext()); - converter.setCustomConversions(customConversions()); - - return converter; - } - -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java index 6917f4aa6e..f93c4ae708 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/AbstractReactiveMongoConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,13 +18,19 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.SpringDataMongoDB; import org.springframework.data.mongodb.core.ReactiveMongoOperations; import org.springframework.data.mongodb.core.ReactiveMongoTemplate; import org.springframework.data.mongodb.core.SimpleReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoClients; /** * Base class for reactive Spring Data MongoDB configuration using JavaConfig. @@ -34,25 +40,33 @@ * @since 2.0 * @see MongoConfigurationSupport */ -@Configuration +@Configuration(proxyBeanMethods = false) public abstract class AbstractReactiveMongoConfiguration extends MongoConfigurationSupport { /** * Return the Reactive Streams {@link MongoClient} instance to connect to. Annotate with {@link Bean} in case you want - * to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}. + * to expose a {@link MongoClient} instance to the {@link org.springframework.context.ApplicationContext}.
+ * Override {@link #mongoClientSettings()} to configure connection details. * * @return never {@literal null}. + * @see #mongoClientSettings() + * @see #configureClientSettings(Builder) */ - public abstract MongoClient reactiveMongoClient(); + public MongoClient reactiveMongoClient() { + return createReactiveMongoClient(mongoClientSettings()); + } /** * Creates {@link ReactiveMongoOperations}. * + * @see #reactiveMongoDbFactory() + * @see #mappingMongoConverter(ReactiveMongoDatabaseFactory, MongoCustomConversions, MongoMappingContext) * @return never {@literal null}. */ @Bean - public ReactiveMongoOperations reactiveMongoTemplate() throws Exception { - return new ReactiveMongoTemplate(reactiveMongoDbFactory(), mappingMongoConverter()); + public ReactiveMongoTemplate reactiveMongoTemplate(ReactiveMongoDatabaseFactory databaseFactory, + MappingMongoConverter mongoConverter) { + return new ReactiveMongoTemplate(databaseFactory, mongoConverter); } /** @@ -60,7 +74,7 @@ public ReactiveMongoOperations reactiveMongoTemplate() throws Exception { * {@link MongoClient} instance configured in {@link #reactiveMongoClient()}. * * @see #reactiveMongoClient() - * @see #reactiveMongoTemplate() + * @see #reactiveMongoTemplate(ReactiveMongoDatabaseFactory, MappingMongoConverter) * @return never {@literal null}. */ @Bean @@ -70,20 +84,31 @@ public ReactiveMongoDatabaseFactory reactiveMongoDbFactory() { /** * Creates a {@link MappingMongoConverter} using the configured {@link #reactiveMongoDbFactory()} and - * {@link #mongoMappingContext()}. Will get {@link #customConversions()} applied. + * {@link #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes)}. Will get {@link #customConversions()} applied. * * @see #customConversions() - * @see #mongoMappingContext() + * @see #mongoMappingContext(MongoCustomConversions, org.springframework.data.mongodb.MongoManagedTypes) * @see #reactiveMongoDbFactory() * @return never {@literal null}. 
- * @throws Exception */ @Bean - public MappingMongoConverter mappingMongoConverter() throws Exception { + public MappingMongoConverter mappingMongoConverter(ReactiveMongoDatabaseFactory databaseFactory, + MongoCustomConversions customConversions, MongoMappingContext mappingContext) { - MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mongoMappingContext()); - converter.setCustomConversions(customConversions()); + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(customConversions); + converter.setCodecRegistryProvider(databaseFactory); return converter; } + + /** + * Create the Reactive Streams {@link MongoClient} instance with given {@link MongoClientSettings}. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClient createReactiveMongoClient(MongoClientSettings settings) { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java index 7865028887..584fbfba30 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/BeanNames.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java new file mode 100644 index 0000000000..0f6ba01704 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ConnectionStringPropertyEditor.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.ConnectionString; + +/** + * Parse a {@link String} to a {@link com.mongodb.ConnectionString}. 
+ * + * @author Christoph Strobl + * @since 3.0 + */ +public class ConnectionStringPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String connectionString) { + + if (!StringUtils.hasText(connectionString)) { + return; + } + + setValue(new ConnectionString(connectionString)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java index 9035da6115..d6ce19f3ee 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableMongoAuditing.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -42,29 +42,29 @@ /** * Configures the {@link AuditorAware} bean to be used to lookup the current principal. * - * @return + * @return empty {@link String} by default. */ String auditorAwareRef() default ""; /** * Configures whether the creation and modification dates are set. Defaults to {@literal true}. * - * @return + * @return {@literal true} by default. */ boolean setDates() default true; /** * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. * - * @return + * @return {@literal true} by default. 
*/ boolean modifyOnCreate() default true; /** - * Configures a {@link DateTimeProvider} bean name that allows customizing the {@link org.joda.time.DateTime} to be - * used for setting creation and modification dates. + * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting + * creation and modification dates. * - * @return + * @return empty {@link String} by default. */ String dateTimeProviderRef() default ""; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java new file mode 100644 index 0000000000..21fadf86c6 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/EnableReactiveMongoAuditing.java @@ -0,0 +1,70 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Inherited; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import org.springframework.context.annotation.Import; +import org.springframework.data.auditing.DateTimeProvider; +import org.springframework.data.domain.ReactiveAuditorAware; + +/** + * Annotation to enable auditing in MongoDB using reactive infrastructure via annotation configuration. + * + * @author Mark Paluch + * @since 3.1 + */ +@Inherited +@Documented +@Target(ElementType.TYPE) +@Retention(RetentionPolicy.RUNTIME) +@Import(ReactiveMongoAuditingRegistrar.class) +public @interface EnableReactiveMongoAuditing { + + /** + * Configures the {@link ReactiveAuditorAware} bean to be used to lookup the current principal. + * + * @return empty {@link String} by default. + */ + String auditorAwareRef() default ""; + + /** + * Configures whether the creation and modification dates are set. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean setDates() default true; + + /** + * Configures whether the entity shall be marked as modified on creation. Defaults to {@literal true}. + * + * @return {@literal true} by default. + */ + boolean modifyOnCreate() default true; + + /** + * Configures a {@link DateTimeProvider} bean name that allows customizing the timestamp to be used for setting + * creation and modification dates. + * + * @return empty {@link String} by default. 
+ */ + String dateTimeProviderRef() default ""; +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java index 048913ed4e..3b10019cc0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GeoJsonConfiguration.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java index a33674ef0a..b86da91dad 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/GridFsTemplateParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -34,10 +34,6 @@ */ class GridFsTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -46,10 +42,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? id : BeanNames.GRID_FS_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java index 55b84cf7ec..f3a7dc0437 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MappingMongoConverterParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,13 +18,12 @@ import static org.springframework.data.mongodb.config.BeanNames.*; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; import java.util.List; import java.util.Set; +import org.jspecify.annotations.NullUnmarked; +import org.jspecify.annotations.Nullable; import org.springframework.beans.BeanMetadataElement; -import org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.BeanDefinitionHolder; import org.springframework.beans.factory.config.RuntimeBeanReference; @@ -51,7 +50,6 @@ import org.springframework.core.type.filter.TypeFilter; import org.springframework.data.annotation.Persistent; import org.springframework.data.config.BeanComponentDefinitionBuilder; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; @@ -62,6 +60,7 @@ import org.springframework.data.mongodb.core.mapping.event.ValidatingMongoEventListener; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; @@ -75,11 +74,14 @@ * @author Thomas Darimont * @author Christoph 
Strobl * @author Mark Paluch + * @author Zied Yaich + * @author Tomasz Forys */ +@NullUnmarked public class MappingMongoConverterParser implements BeanDefinitionParser { private static final String BASE_PACKAGE = "base-package"; - private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("javax.validation.Validator", + private static final boolean JSR_303_PRESENT = ClassUtils.isPresent("jakarta.validation.Validator", MappingMongoConverterParser.class.getClassLoader()); /* (non-Javadoc) @@ -95,12 +97,12 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { String id = element.getAttribute(AbstractBeanDefinitionParser.ID_ATTRIBUTE); id = StringUtils.hasText(id) ? id : DEFAULT_CONVERTER_BEAN_NAME; + boolean autoIndexCreationEnabled = isAutoIndexCreationEnabled(element); + parserContext.pushContainingComponent(new CompositeComponentDefinition("Mapping Mongo Converter", element)); BeanDefinition conversionsDefinition = getCustomConversions(element, parserContext); - String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id); - - createIsNewStrategyFactoryBeanDefinition(ctxRef, parserContext, element); + String ctxRef = potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, id, autoIndexCreationEnabled); // Need a reference to a Mongo instance String dbFactoryRef = element.getAttribute("db-factory-ref"); @@ -133,9 +135,7 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { new BeanComponentDefinition(indexOperationsProviderBuilder.getBeanDefinition(), "indexOperationsProvider")); } - try { - registry.getBeanDefinition(INDEX_HELPER_BEAN_NAME); - } catch (NoSuchBeanDefinitionException ignored) { + if (!registry.containsBeanDefinition(INDEX_HELPER_BEAN_NAME)) { BeanDefinitionBuilder indexHelperBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoPersistentEntityIndexCreator.class); @@ -149,7 +149,7 @@ public BeanDefinition parse(Element 
element, ParserContext parserContext) { BeanDefinition validatingMongoEventListener = potentiallyCreateValidatingMongoEventListener(element, parserContext); - if (validatingMongoEventListener != null) { + if (validatingMongoEventListener != null && !registry.containsBeanDefinition(VALIDATING_EVENT_LISTENER_BEAN_NAME)) { parserContext.registerBeanComponent( new BeanComponentDefinition(validatingMongoEventListener, VALIDATING_EVENT_LISTENER_BEAN_NAME)); } @@ -159,18 +159,19 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { return null; } - private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) { + private @Nullable BeanDefinition potentiallyCreateValidatingMongoEventListener(Element element, ParserContext parserContext) { String disableValidation = element.getAttribute("disable-validation"); - boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.valueOf(disableValidation); + boolean validationDisabled = StringUtils.hasText(disableValidation) && Boolean.parseBoolean(disableValidation); if (!validationDisabled) { BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(); - RuntimeBeanReference validator = getValidator(builder, parserContext); + RuntimeBeanReference validator = getValidator(element, parserContext); if (validator != null) { builder.getRawBeanDefinition().setBeanClass(ValidatingMongoEventListener.class); + builder.getRawBeanDefinition().setSource(element); builder.addConstructorArgValue(validator); return builder.getBeanDefinition(); @@ -180,6 +181,7 @@ private BeanDefinition potentiallyCreateValidatingMongoEventListener(Element ele return null; } + @Nullable private RuntimeBeanReference getValidator(Object source, ParserContext parserContext) { if (!JSR_303_PRESENT) { @@ -191,13 +193,39 @@ private RuntimeBeanReference getValidator(Object source, ParserContext parserCon validatorDef.setSource(source); 
validatorDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE); String validatorName = parserContext.getReaderContext().registerWithGeneratedName(validatorDef); - parserContext.registerBeanComponent(new BeanComponentDefinition(validatorDef, validatorName)); return new RuntimeBeanReference(validatorName); } + private static boolean isAutoIndexCreationEnabled(Element element) { + + String autoIndexCreation = element.getAttribute("auto-index-creation"); + return StringUtils.hasText(autoIndexCreation) && Boolean.parseBoolean(autoIndexCreation); + } + + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribute}. + * + * @return the mapping context bean name. + * @deprecated since 4.3. Use + * {@link #potentiallyCreateMappingContext(Element, ParserContext, BeanDefinition, String, boolean)} + * instead. + */ + @Deprecated(since = "4.3", forRemoval = true) + public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, + @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId) { + return potentiallyCreateMappingContext(element, parserContext, conversionsDefinition, converterId, false); + } + + /** + * Create and register the {@link BeanDefinition} for a {@link MongoMappingContext} if not explicitly referenced by a + * given {@literal mapping-context-ref} {@link Element#getAttribute(String) attribute}. + * + * @return the mapping context bean name. 
+ */ public static String potentiallyCreateMappingContext(Element element, ParserContext parserContext, - BeanDefinition conversionsDefinition, String converterId) { + @Nullable BeanDefinition conversionsDefinition, @Nullable String converterId, boolean autoIndexCreation) { String ctxRef = element.getAttribute("mapping-context-ref"); @@ -211,7 +239,7 @@ public static String potentiallyCreateMappingContext(Element element, ParserCont BeanDefinitionBuilder mappingContextBuilder = BeanDefinitionBuilder .genericBeanDefinition(MongoMappingContext.class); - Set classesToAdd = getInititalEntityClasses(element); + Set classesToAdd = getInitialEntityClasses(element); if (classesToAdd != null) { mappingContextBuilder.addPropertyValue("initialEntitySet", classesToAdd); @@ -225,6 +253,8 @@ public static String potentiallyCreateMappingContext(Element element, ParserCont mappingContextBuilder.addPropertyValue("simpleTypeHolder", simpleTypesDefinition); } + mappingContextBuilder.addPropertyValue("autoIndexCreation", autoIndexCreation); + parseFieldNamingStrategy(element, parserContext.getReaderContext(), mappingContextBuilder); ctxRef = converterId == null || DEFAULT_CONVERTER_BEAN_NAME.equals(converterId) ? 
MAPPING_CONTEXT_BEAN_NAME @@ -244,7 +274,7 @@ private static void parseFieldNamingStrategy(Element element, ReaderContext cont && Boolean.parseBoolean(abbreviateFieldNames); if (fieldNamingStrategyReferenced && abbreviationActivated) { - context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured!", + context.error("Field name abbreviation cannot be activated if a field-naming-strategy-ref is configured", element); return; } @@ -262,17 +292,17 @@ private static void parseFieldNamingStrategy(Element element, ReaderContext cont } } - private BeanDefinition getCustomConversions(Element element, ParserContext parserContext) { + private @Nullable BeanDefinition getCustomConversions(Element element, ParserContext parserContext) { List customConvertersElements = DomUtils.getChildElementsByTagName(element, "custom-converters"); if (customConvertersElements.size() == 1) { Element customerConvertersElement = customConvertersElements.get(0); - ManagedList converterBeans = new ManagedList(); + ManagedList converterBeans = new ManagedList<>(); List converterElements = DomUtils.getChildElementsByTagName(customerConvertersElement, "converter"); - if (converterElements != null) { + if (!ObjectUtils.isEmpty(converterElements)) { for (Element listenerElement : converterElements) { converterBeans.add(parseConverter(listenerElement, parserContext)); } @@ -285,9 +315,7 @@ private BeanDefinition getCustomConversions(Element element, ParserContext parse provider.addExcludeFilter(new NegatingFilter(new AssignableTypeFilter(Converter.class), new AssignableTypeFilter(GenericConverter.class))); - for (BeanDefinition candidate : provider.findCandidateComponents(packageToScan)) { - converterBeans.add(candidate); - } + converterBeans.addAll(provider.findCandidateComponents(packageToScan)); } BeanDefinitionBuilder conversionsBuilder = BeanDefinitionBuilder.rootBeanDefinition(MongoCustomConversions.class); @@ -304,7 +332,8 @@ private BeanDefinition 
getCustomConversions(Element element, ParserContext parse return null; } - private static Set getInititalEntityClasses(Element element) { + @Nullable + private static Set getInitialEntityClasses(Element element) { String basePackage = element.getAttribute(BASE_PACKAGE); @@ -317,7 +346,7 @@ private static Set getInititalEntityClasses(Element element) { componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); - Set classes = new ManagedSet(); + Set classes = new ManagedSet<>(); for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { classes.add(candidate.getBeanClassName()); } @@ -325,7 +354,7 @@ private static Set getInititalEntityClasses(Element element) { return classes; } - public BeanMetadataElement parseConverter(Element element, ParserContext parserContext) { + public @Nullable BeanMetadataElement parseConverter(Element element, ParserContext parserContext) { String converterRef = element.getAttribute("ref"); if (StringUtils.hasText(converterRef)) { @@ -343,20 +372,6 @@ public BeanMetadataElement parseConverter(Element element, ParserContext parserC return null; } - public static String createIsNewStrategyFactoryBeanDefinition(String mappingContextRef, ParserContext context, - Element element) { - - BeanDefinitionBuilder mappingContextStrategyFactoryBuilder = BeanDefinitionBuilder - .rootBeanDefinition(MappingContextIsNewStrategyFactory.class); - mappingContextStrategyFactoryBuilder.addConstructorArgReference(mappingContextRef); - - BeanComponentDefinitionBuilder builder = new BeanComponentDefinitionBuilder(element, context); - context.registerBeanComponent( - builder.getComponent(mappingContextStrategyFactoryBuilder, IS_NEW_STRATEGY_FACTORY_BEAN_NAME)); - - return IS_NEW_STRATEGY_FACTORY_BEAN_NAME; - } - /** * {@link TypeFilter} that returns {@literal false} in case any of the given delegates matches. 
* @@ -375,13 +390,9 @@ public NegatingFilter(TypeFilter... filters) { Assert.notNull(filters, "TypeFilters must not be null"); - this.delegates = new HashSet(Arrays.asList(filters)); + this.delegates = Set.of(filters); } - /* - * (non-Javadoc) - * @see org.springframework.core.type.filter.TypeFilter#match(org.springframework.core.type.classreading.MetadataReader, org.springframework.core.type.classreading.MetadataReaderFactory) - */ public boolean match(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory) throws IOException { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java index 991ffe0c44..a304199776 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingBeanDefinitionParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,9 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.BeanNames.*; +import org.jspecify.annotations.NullUnmarked; +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.beans.factory.support.RootBeanDefinition; @@ -26,40 +29,36 @@ import org.springframework.beans.factory.xml.ParserContext; import org.springframework.data.auditing.config.IsNewAwareAuditingHandlerBeanDefinitionParser; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.util.ClassUtils; import org.springframework.util.StringUtils; + import org.w3c.dom.Element; /** - * {@link BeanDefinitionParser} to register a {@link AuditingEventListener} to transparently set auditing information on - * an entity. + * {@link BeanDefinitionParser} to register a {@link AuditingEntityCallback} to transparently set auditing information + * on an entity. 
* * @author Oliver Gierke + * @author Mark Paluch */ +@NullUnmarked public class MongoAuditingBeanDefinitionParser extends AbstractSingleBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#getBeanClass(org.w3c.dom.Element) - */ + private static boolean PROJECT_REACTOR_AVAILABLE = ClassUtils.isPresent("reactor.core.publisher.Mono", + MongoAuditingRegistrar.class.getClassLoader()); + @Override protected Class getBeanClass(Element element) { - return AuditingEventListener.class; + return AuditingEntityCallback.class; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#shouldGenerateId() - */ @Override protected boolean shouldGenerateId() { return true; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractSingleBeanDefinitionParser#doParse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext, org.springframework.beans.factory.support.BeanDefinitionBuilder) - */ @Override protected void doParse(Element element, ParserContext parserContext, BeanDefinitionBuilder builder) { @@ -80,7 +79,24 @@ protected void doParse(Element element, ParserContext parserContext, BeanDefinit mappingContextRef); parser.parse(element, parserContext); - builder.addConstructorArgValue(getObjectFactoryBeanDefinition(parser.getResolvedBeanName(), - parserContext.extractSource(element))); + AbstractBeanDefinition isNewAwareAuditingHandler = getObjectFactoryBeanDefinition(parser.getResolvedBeanName(), + parserContext.extractSource(element)); + builder.addConstructorArgValue(isNewAwareAuditingHandler); + + if (PROJECT_REACTOR_AVAILABLE) { + registerReactiveAuditingEntityCallback(parserContext.getRegistry(), isNewAwareAuditingHandler, + parserContext.extractSource(element)); + } + } + + private void registerReactiveAuditingEntityCallback(BeanDefinitionRegistry registry, + AbstractBeanDefinition isNewAwareAuditingHandler, @Nullable 
Object source) { + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class); + + builder.addConstructorArgValue(isNewAwareAuditingHandler); + builder.getRawBeanDefinition().setSource(source); + + registry.registerBeanDefinition(ReactiveAuditingEntityCallback.class.getName(), builder.getBeanDefinition()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java index 4ad7944acc..37e509a38a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoAuditingRegistrar.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,22 +17,16 @@ import java.lang.annotation.Annotation; -import org.springframework.beans.factory.FactoryBean; import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.support.AbstractBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionBuilder; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; -import org.springframework.core.type.AnnotationMetadata; +import org.springframework.core.Ordered; import org.springframework.data.auditing.IsNewAwareAuditingHandler; import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; import org.springframework.data.auditing.config.AuditingConfiguration; import org.springframework.data.config.ParsingUtils; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mongodb.core.convert.MappingMongoConverter; -import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.event.AuditingEventListener; +import org.springframework.data.mongodb.core.mapping.event.AuditingEntityCallback; import org.springframework.util.Assert; /** @@ -40,123 +34,55 @@ * * @author Thomas Darimont * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl */ -class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { +class MongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport implements Ordered { - /* - * (non-Javadoc) - * @see 
org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAnnotation() - */ @Override protected Class getAnnotation() { return EnableMongoAuditing.class; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditingHandlerBeanName() - */ @Override protected String getAuditingHandlerBeanName() { return "mongoAuditingHandler"; } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerBeanDefinitions(org.springframework.core.type.AnnotationMetadata, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override - public void registerBeanDefinitions(AnnotationMetadata annotationMetadata, BeanDefinitionRegistry registry) { - - Assert.notNull(annotationMetadata, "AnnotationMetadata must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { - super.registerBeanDefinitions(annotationMetadata, registry); + builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext"); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#getAuditHandlerBeanDefinitionBuilder(org.springframework.data.auditing.config.AuditingConfiguration) - */ @Override protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { - Assert.notNull(configuration, "AuditingConfiguration must not be null!"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class); + Assert.notNull(configuration, "AuditingConfiguration must not be null"); - BeanDefinitionBuilder definition = BeanDefinitionBuilder.genericBeanDefinition(MongoMappingContextLookup.class); - 
definition.setAutowireMode(AbstractBeanDefinition.AUTOWIRE_CONSTRUCTOR); - - builder.addConstructorArgValue(definition.getBeanDefinition()); - return configureDefaultAuditHandlerAttributes(configuration, builder); + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(IsNewAwareAuditingHandler.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport#registerAuditListener(org.springframework.beans.factory.config.BeanDefinition, org.springframework.beans.factory.support.BeanDefinitionRegistry) - */ @Override protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, BeanDefinitionRegistry registry) { - Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null!"); - Assert.notNull(registry, "BeanDefinitionRegistry must not be null!"); + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); BeanDefinitionBuilder listenerBeanDefinitionBuilder = BeanDefinitionBuilder - .rootBeanDefinition(AuditingEventListener.class); + .rootBeanDefinition(AuditingEntityCallback.class); listenerBeanDefinitionBuilder .addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry)); registerInfrastructureBeanWithId(listenerBeanDefinitionBuilder.getBeanDefinition(), - AuditingEventListener.class.getName(), registry); + AuditingEntityCallback.class.getName(), registry); } - /** - * Simple helper to be able to wire the {@link MappingContext} from a {@link MappingMongoConverter} bean available in - * the application context. 
- * - * @author Oliver Gierke - */ - static class MongoMappingContextLookup - implements FactoryBean, MongoPersistentProperty>> { - - private final MappingMongoConverter converter; - - /** - * Creates a new {@link MongoMappingContextLookup} for the given {@link MappingMongoConverter}. - * - * @param converter must not be {@literal null}. - */ - public MongoMappingContextLookup(MappingMongoConverter converter) { - this.converter = converter; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObject() - */ - @Override - public MappingContext, MongoPersistentProperty> getObject() throws Exception { - return converter.getMappingContext(); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - @Override - public Class getObjectType() { - return MappingContext.class; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#isSingleton() - */ - @Override - public boolean isSingleton() { - return true; - } + @Override + public int getOrder() { + return Ordered.LOWEST_PRECEDENCE; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java index 5ae3725354..501c00b9d6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoClientParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -35,10 +35,6 @@ */ public class MongoClientParser implements BeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.BeanDefinitionParser#parse(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ public BeanDefinition parse(Element element, ParserContext parserContext) { Object source = parserContext.extractSource(element); @@ -50,10 +46,11 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { ParsingUtils.setPropertyValue(builder, element, "port", "port"); ParsingUtils.setPropertyValue(builder, element, "host", "host"); - ParsingUtils.setPropertyValue(builder, element, "credentials", "credentials"); + ParsingUtils.setPropertyValue(builder, element, "credential", "credential"); + ParsingUtils.setPropertyValue(builder, element, "replica-set", "replicaSet"); + ParsingUtils.setPropertyValue(builder, element, "connection-string", "connectionString"); - MongoParsingUtils.parseMongoClientOptions(element, builder); - MongoParsingUtils.parseReplicaSet(element, builder); + MongoParsingUtils.parseMongoClientSettings(element, builder); String defaultedId = StringUtils.hasText(id) ? 
id : BeanNames.MONGO_BEAN_NAME; @@ -62,22 +59,34 @@ public BeanDefinition parse(Element element, ParserContext parserContext) { BeanComponentDefinition mongoComponent = helper.getComponent(builder, defaultedId); parserContext.registerBeanComponent(mongoComponent); - BeanComponentDefinition serverAddressPropertyEditor = helper.getComponent(MongoParsingUtils - .getServerAddressPropertyEditorBuilder()); + BeanComponentDefinition connectionStringPropertyEditor = helper + .getComponent(MongoParsingUtils.getConnectionStringPropertyEditorBuilder()); + parserContext.registerBeanComponent(connectionStringPropertyEditor); + + BeanComponentDefinition serverAddressPropertyEditor = helper + .getComponent(MongoParsingUtils.getServerAddressPropertyEditorBuilder()); parserContext.registerBeanComponent(serverAddressPropertyEditor); - BeanComponentDefinition writeConcernEditor = helper.getComponent(MongoParsingUtils - .getWriteConcernPropertyEditorBuilder()); + BeanComponentDefinition writeConcernEditor = helper + .getComponent(MongoParsingUtils.getWriteConcernPropertyEditorBuilder()); parserContext.registerBeanComponent(writeConcernEditor); - BeanComponentDefinition readPreferenceEditor = helper.getComponent(MongoParsingUtils - .getReadPreferencePropertyEditorBuilder()); + BeanComponentDefinition readConcernEditor = helper + .getComponent(MongoParsingUtils.getReadConcernPropertyEditorBuilder()); + parserContext.registerBeanComponent(readConcernEditor); + + BeanComponentDefinition readPreferenceEditor = helper + .getComponent(MongoParsingUtils.getReadPreferencePropertyEditorBuilder()); parserContext.registerBeanComponent(readPreferenceEditor); - BeanComponentDefinition credentialsEditor = helper.getComponent(MongoParsingUtils - .getMongoCredentialPropertyEditor()); + BeanComponentDefinition credentialsEditor = helper + .getComponent(MongoParsingUtils.getMongoCredentialPropertyEditor()); parserContext.registerBeanComponent(credentialsEditor); + BeanComponentDefinition 
uuidRepresentationEditor = helper + .getComponent(MongoParsingUtils.getUUidRepresentationEditorBuilder()); + parserContext.registerBeanComponent(uuidRepresentationEditor); + parserContext.popAndRegisterContainingComponent(); return mongoComponent.getBeanDefinition(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java index 28c1821d61..b01827d8c6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoConfigurationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,33 @@ */ package org.springframework.data.mongodb.config; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Set; +import org.bson.UuidRepresentation; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; import org.springframework.core.convert.converter.Converter; import org.springframework.core.type.filter.AnnotationTypeFilter; -import org.springframework.data.annotation.Persistent; import org.springframework.data.convert.CustomConversions; -import org.springframework.data.mapping.context.MappingContext; -import org.springframework.data.mapping.context.MappingContextIsNewStrategyFactory; -import org.springframework.data.mapping.context.PersistentEntities; import org.springframework.data.mapping.model.CamelCaseAbbreviatingFieldNamingStrategy; import org.springframework.data.mapping.model.FieldNamingStrategy; import org.springframework.data.mapping.model.PropertyNameFieldNamingStrategy; +import org.springframework.data.mongodb.MongoManagedTypes; import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions.MongoConverterConfigurationAdapter; import org.springframework.data.mongodb.core.mapping.Document; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; -import org.springframework.data.support.CachingIsNewStrategyFactory; -import org.springframework.data.support.IsNewStrategyFactory; +import org.springframework.util.Assert; import 
org.springframework.util.ClassUtils; import org.springframework.util.StringUtils; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; + /** * Base class for Spring Data MongoDB to be extended for JavaConfiguration usage. * @@ -53,7 +53,7 @@ public abstract class MongoConfigurationSupport { /** * Return the name of the database to connect to. * - * @return must not be {@literal null}. + * @return never {@literal null}. */ protected abstract String getDatabaseName(); @@ -77,43 +77,57 @@ protected Collection getMappingBasePackages() { * Creates a {@link MongoMappingContext} equipped with entity classes scanned from the mapping base package. * * @see #getMappingBasePackages() - * @return - * @throws ClassNotFoundException + * @return never {@literal null}. */ @Bean - public MongoMappingContext mongoMappingContext() throws ClassNotFoundException { + public MongoMappingContext mongoMappingContext(MongoCustomConversions customConversions, + MongoManagedTypes mongoManagedTypes) { MongoMappingContext mappingContext = new MongoMappingContext(); - mappingContext.setInitialEntitySet(getInitialEntitySet()); - mappingContext.setSimpleTypeHolder(customConversions().getSimpleTypeHolder()); + mappingContext.setManagedTypes(mongoManagedTypes); + mappingContext.setSimpleTypeHolder(customConversions.getSimpleTypeHolder()); mappingContext.setFieldNamingStrategy(fieldNamingStrategy()); + mappingContext.setAutoIndexCreation(autoIndexCreation()); return mappingContext; } /** - * Returns a {@link MappingContextIsNewStrategyFactory} wrapped into a {@link CachingIsNewStrategyFactory}. - * - * @return + * @return new instance of {@link MongoManagedTypes}. 
* @throws ClassNotFoundException + * @since 4.0 */ @Bean - public IsNewStrategyFactory isNewStrategyFactory() throws ClassNotFoundException { - - return new CachingIsNewStrategyFactory(new MappingContextIsNewStrategyFactory( - new PersistentEntities(Arrays.> asList(new MappingContext[] { mongoMappingContext() })))); + public MongoManagedTypes mongoManagedTypes() throws ClassNotFoundException { + return MongoManagedTypes.fromIterable(getInitialEntitySet()); } /** * Register custom {@link Converter}s in a {@link CustomConversions} object if required. These - * {@link CustomConversions} will be registered with the {@link #mappingMongoConverter()} and - * {@link #mongoMappingContext()}. Returns an empty {@link MongoCustomConversions} instance by default. + * {@link CustomConversions} will be registered with the + * {@link org.springframework.data.mongodb.core.convert.MappingMongoConverter} and {@link MongoMappingContext}. + * Returns an empty {@link MongoCustomConversions} instance by default. + *

+ * NOTE: Use {@link #configureConverters(MongoConverterConfigurationAdapter)} to configure MongoDB + * native simple types and register custom {@link Converter converters}. * * @return must not be {@literal null}. */ @Bean - public CustomConversions customConversions() { - return new MongoCustomConversions(Collections.emptyList()); + public MongoCustomConversions customConversions() { + return MongoCustomConversions.create(this::configureConverters); + } + + /** + * Configuration hook for {@link MongoCustomConversions} creation. + * + * @param converterConfigurationAdapter never {@literal null}. + * @since 2.3 + * @see MongoConverterConfigurationAdapter#useNativeDriverJavaTimeCodecs() + * @see MongoConverterConfigurationAdapter#useSpringDataJavaTimeCodecs() + */ + protected void configureConverters(MongoConverterConfigurationAdapter converterConfigurationAdapter) { + } /** @@ -136,8 +150,7 @@ protected Set> getInitialEntitySet() throws ClassNotFoundException { } /** - * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document} and - * {@link Persistent}. + * Scans the given base package for entities, i.e. MongoDB specific types annotated with {@link Document}. * * @param basePackage must not be {@literal null}. 
* @return @@ -157,12 +170,13 @@ protected Set> scanForEntities(String basePackage) throws ClassNotFound ClassPathScanningCandidateComponentProvider componentProvider = new ClassPathScanningCandidateComponentProvider( false); componentProvider.addIncludeFilter(new AnnotationTypeFilter(Document.class)); - componentProvider.addIncludeFilter(new AnnotationTypeFilter(Persistent.class)); for (BeanDefinition candidate : componentProvider.findCandidateComponents(basePackage)) { - initialEntitySet - .add(ClassUtils.forName(candidate.getBeanClassName(), MongoConfigurationSupport.class.getClassLoader())); + String beanClassName = candidate.getBeanClassName(); + Assert.notNull(beanClassName, "BeanClassName cannot be null"); + + initialEntitySet.add(ClassUtils.forName(beanClassName, MongoConfigurationSupport.class.getClassLoader())); } } @@ -171,8 +185,7 @@ protected Set> scanForEntities(String basePackage) throws ClassNotFound /** * Configures whether to abbreviate field names for domain objects by configuring a - * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. For advanced - * customization needs, consider overriding {@link #mappingMongoConverter()}. + * {@link CamelCaseAbbreviatingFieldNamingStrategy} on the {@link MongoMappingContext} instance created. * * @return */ @@ -190,4 +203,41 @@ protected FieldNamingStrategy fieldNamingStrategy() { return abbreviateFieldNames() ? new CamelCaseAbbreviatingFieldNamingStrategy() : PropertyNameFieldNamingStrategy.INSTANCE; } + + /** + * Configure whether to automatically create indices for domain types by deriving the + * {@link org.springframework.data.mongodb.core.index.IndexDefinition} from the entity or not. + * + * @return {@literal false} by default.
+ * INFO: As of 3.x the default is set to {@literal false}; In 2.x it was {@literal true}. + * @since 2.2 + */ + protected boolean autoIndexCreation() { + return false; + } + + /** + * Return the {@link MongoClientSettings} used to create the actual {@literal MongoClient}.
+ * Override either this method, or use {@link #configureClientSettings(Builder)} to alter the setup. + * + * @return never {@literal null}. + * @since 3.0 + */ + protected MongoClientSettings mongoClientSettings() { + + MongoClientSettings.Builder builder = MongoClientSettings.builder(); + builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY); + configureClientSettings(builder); + return builder.build(); + } + + /** + * Configure {@link MongoClientSettings} via its {@link Builder} API. + * + * @param builder never {@literal null}. + * @since 3.0 + */ + protected void configureClientSettings(MongoClientSettings.Builder builder) { + // customization hook + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java index 1d8e527674..93d778c861 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoCredentialPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,9 @@ package org.springframework.data.mongodb.config; import java.beans.PropertyEditorSupport; -import java.io.UnsupportedEncodingException; +import java.lang.reflect.Method; import java.net.URLDecoder; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -25,7 +26,8 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; +import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; import com.mongodb.MongoCredential; @@ -49,10 +51,6 @@ public class MongoCredentialPropertyEditor extends PropertyEditorSupport { private static final String OPTIONS_DELIMITER = "?"; private static final String OPTION_VALUE_DELIMITER = "&"; - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String text) throws IllegalArgumentException { @@ -78,12 +76,23 @@ public void setAsText(@Nullable String text) throws IllegalArgumentException { verifyUserNamePresent(userNameAndPassword); credentials.add(MongoCredential.createGSSAPICredential(userNameAndPassword[0])); - } else if (MongoCredential.MONGODB_CR_MECHANISM.equals(authMechanism)) { + } else if ("MONGODB-CR".equals(authMechanism)) { verifyUsernameAndPasswordPresent(userNameAndPassword); verifyDatabasePresent(database); - credentials.add(MongoCredential.createMongoCRCredential(userNameAndPassword[0], database, - userNameAndPassword[1].toCharArray())); + + Method createCRCredentialMethod = ReflectionUtils.findMethod(MongoCredential.class, + "createMongoCRCredential", 
String.class, String.class, char[].class); + + if (createCRCredentialMethod == null) { + throw new IllegalArgumentException("MONGODB-CR is no longer supported."); + } + + MongoCredential credential = MongoCredential.class + .cast(ReflectionUtils.invokeMethod(createCRCredentialMethod, null, userNameAndPassword[0], database, + userNameAndPassword[1].toCharArray())); + credentials.add(credential); + } else if (MongoCredential.MONGODB_X509_MECHANISM.equals(authMechanism)) { verifyUserNamePresent(userNameAndPassword); @@ -100,9 +109,15 @@ public void setAsText(@Nullable String text) throws IllegalArgumentException { verifyDatabasePresent(database); credentials.add(MongoCredential.createScramSha1Credential(userNameAndPassword[0], database, userNameAndPassword[1].toCharArray())); + } else if (MongoCredential.SCRAM_SHA_256_MECHANISM.equals(authMechanism)) { + + verifyUsernameAndPasswordPresent(userNameAndPassword); + verifyDatabasePresent(database); + credentials.add(MongoCredential.createScramSha256Credential(userNameAndPassword[0], database, + userNameAndPassword[1].toCharArray())); } else { throw new IllegalArgumentException( - String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'!", authMechanism)); + String.format("Cannot create MongoCredentials for unknown auth mechanism '%s'", authMechanism)); } } } else { @@ -179,7 +194,7 @@ private static Properties extractOptions(String text) { String[] optionArgs = option.split("="); if (optionArgs.length == 1) { - throw new IllegalArgumentException(String.format("Query parameter '%s' has no value!", optionArgs[0])); + throw new IllegalArgumentException(String.format("Query parameter '%s' has no value", optionArgs[0])); } properties.put(optionArgs[0], optionArgs[1]); @@ -194,29 +209,25 @@ private static void verifyUsernameAndPasswordPresent(String[] source) { if (source.length != 2) { throw new IllegalArgumentException( - "Credentials need to specify username and password like in 
'username:password@database'!"); + "Credentials need to specify username and password like in 'username:password@database'"); } } private static void verifyDatabasePresent(String source) { if (!StringUtils.hasText(source)) { - throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'!"); + throw new IllegalArgumentException("Credentials need to specify database like in 'username:password@database'"); } } private static void verifyUserNamePresent(String[] source) { if (source.length == 0 || !StringUtils.hasText(source[0])) { - throw new IllegalArgumentException("Credentials need to specify username!"); + throw new IllegalArgumentException("Credentials need to specify username"); } } private static String decodeParameter(String it) { - try { - return URLDecoder.decode(it, "UTF-8"); - } catch (UnsupportedEncodingException e) { - throw new IllegalArgumentException("o_O UTF-8 not supported!", e); - } + return URLDecoder.decode(it, StandardCharsets.UTF_8); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java index e85a681a3d..2d3649c53a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoDbFactoryParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,10 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.MongoParsingUtils.*; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; +import org.jspecify.annotations.NullUnmarked; +import org.jspecify.annotations.Nullable; import org.springframework.beans.factory.BeanDefinitionStoreException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.parsing.BeanComponentDefinition; @@ -32,14 +32,11 @@ import org.springframework.beans.factory.xml.ParserContext; import org.springframework.data.config.BeanComponentDefinitionBuilder; import org.springframework.data.mongodb.core.MongoClientFactoryBean; -import org.springframework.data.mongodb.core.SimpleMongoDbFactory; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.SimpleMongoClientDatabaseFactory; import org.springframework.util.StringUtils; import org.w3c.dom.Element; -import com.mongodb.Mongo; -import com.mongodb.MongoClientURI; -import com.mongodb.MongoURI; +import com.mongodb.ConnectionString; /** * {@link BeanDefinitionParser} to parse {@code db-factory} elements into {@link BeanDefinition}s. 
@@ -51,23 +48,11 @@ * @author Viktor Khoroshko * @author Mark Paluch */ +@NullUnmarked public class MongoDbFactoryParser extends AbstractBeanDefinitionParser { - private static final Set MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES; + private static final Set MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Set.of("id", "write-concern"); - static { - - Set mongoUriAllowedAdditionalAttributes = new HashSet(); - mongoUriAllowedAdditionalAttributes.add("id"); - mongoUriAllowedAdditionalAttributes.add("write-concern"); - - MONGO_URI_ALLOWED_ADDITIONAL_ATTRIBUTES = Collections.unmodifiableSet(mongoUriAllowedAdditionalAttributes); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -76,18 +61,15 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? 
id : BeanNames.DB_FACTORY_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { // Common setup - BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder.genericBeanDefinition(SimpleMongoDbFactory.class); + BeanDefinitionBuilder dbFactoryBuilder = BeanDefinitionBuilder + .genericBeanDefinition(SimpleMongoClientDatabaseFactory.class); setPropertyValue(dbFactoryBuilder, element, "write-concern", "writeConcern"); - BeanDefinition mongoUri = getMongoUri(element, parserContext); + BeanDefinition mongoUri = getConnectionString(element, parserContext); if (mongoUri != null) { @@ -97,7 +79,8 @@ protected AbstractBeanDefinition parseInternal(Element element, ParserContext pa BeanComponentDefinitionBuilder helper = new BeanComponentDefinitionBuilder(element, parserContext); - String mongoRef = element.getAttribute("mongo-ref"); + String mongoRef = element.getAttribute("mongo-client-ref"); + String dbname = element.getAttribute("dbname"); // Defaulting @@ -119,8 +102,8 @@ protected AbstractBeanDefinition parseInternal(Element element, ParserContext pa } /** - * Registers a default {@link BeanDefinition} of a {@link Mongo} instance and returns the name under which the - * {@link Mongo} instance was registered under. + * Registers a default {@link BeanDefinition} of a {@link com.mongodb.client.MongoClient} instance and returns the + * name under which the {@link com.mongodb.client.MongoClient} instance was registered under. * * @param element must not be {@literal null}. * @param parserContext must not be {@literal null}. 
@@ -136,8 +119,7 @@ private BeanDefinition registerMongoBeanDefinition(Element element, ParserContex } /** - * Creates a {@link BeanDefinition} for a {@link MongoURI} or {@link MongoClientURI} depending on configured - * attributes.
+ * Creates a {@link BeanDefinition} for a {@link ConnectionString} depending on configured attributes.
* Errors when configured element contains {@literal uri} or {@literal client-uri} along with other attributes except * {@literal write-concern} and/or {@literal id}. * @@ -145,12 +127,19 @@ private BeanDefinition registerMongoBeanDefinition(Element element, ParserContex * @param parserContext * @return {@literal null} in case no client-/uri defined. */ - @Nullable - private BeanDefinition getMongoUri(Element element, ParserContext parserContext) { + private @Nullable BeanDefinition getConnectionString(Element element, ParserContext parserContext) { - boolean hasClientUri = element.hasAttribute("client-uri"); + String type = null; - if (!hasClientUri && !element.hasAttribute("uri")) { + if (element.hasAttribute("client-uri")) { + type = "client-uri"; + } else if (element.hasAttribute("connection-string")) { + type = "connection-string"; + } else if (element.hasAttribute("uri")) { + type = "uri"; + } + + if (!StringUtils.hasText(type)) { return null; } @@ -164,16 +153,12 @@ private BeanDefinition getMongoUri(Element element, ParserContext parserContext) if (element.getAttributes().getLength() > allowedAttributesCount) { - parserContext.getReaderContext().error( - "Configure either " + (hasClientUri ? "Mongo Client URI" : "Mongo URI") + " or details individually!", + parserContext.getReaderContext().error("Configure either MongoDB " + type + " or details individually", parserContext.extractSource(element)); } - Class type = MongoClientURI.class; - String uri = hasClientUri ? 
element.getAttribute("client-uri") : element.getAttribute("uri"); - - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(type); - builder.addConstructorArgValue(uri); + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(ConnectionString.class); + builder.addConstructorArgValue(element.getAttribute(type)); return builder.getBeanDefinition(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java deleted file mode 100644 index b2aa54d985..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoJmxParser.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.config; - -import org.springframework.beans.factory.config.BeanDefinition; -import org.springframework.beans.factory.parsing.BeanComponentDefinition; -import org.springframework.beans.factory.parsing.CompositeComponentDefinition; -import org.springframework.beans.factory.support.BeanDefinitionBuilder; -import org.springframework.beans.factory.xml.BeanDefinitionParser; -import org.springframework.beans.factory.xml.ParserContext; -import org.springframework.data.mongodb.core.MongoAdmin; -import org.springframework.data.mongodb.monitor.*; -import org.springframework.util.StringUtils; -import org.w3c.dom.Element; - -/** - * @author Mark Pollack - * @author Thomas Risberg - * @author John Brisbin - * @author Oliver Gierke - * @author Christoph Strobl - */ -public class MongoJmxParser implements BeanDefinitionParser { - - public BeanDefinition parse(Element element, ParserContext parserContext) { - String name = element.getAttribute("mongo-ref"); - if (!StringUtils.hasText(name)) { - name = BeanNames.MONGO_BEAN_NAME; - } - registerJmxComponents(name, element, parserContext); - return null; - } - - protected void registerJmxComponents(String mongoRefName, Element element, ParserContext parserContext) { - Object eleSource = parserContext.extractSource(element); - - CompositeComponentDefinition compositeDef = new CompositeComponentDefinition(element.getTagName(), eleSource); - - createBeanDefEntry(AssertMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(BackgroundFlushingMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(BtreeIndexCounters.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(ConnectionMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(GlobalLockMetrics.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(MemoryMetrics.class, 
compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(OperationCounters.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(ServerInfo.class, compositeDef, mongoRefName, eleSource, parserContext); - createBeanDefEntry(MongoAdmin.class, compositeDef, mongoRefName, eleSource, parserContext); - - parserContext.registerComponent(compositeDef); - - } - - protected void createBeanDefEntry(Class clazz, CompositeComponentDefinition compositeDef, String mongoRefName, - Object eleSource, ParserContext parserContext) { - BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(clazz); - builder.getRawBeanDefinition().setSource(eleSource); - builder.addConstructorArgReference(mongoRefName); - BeanDefinition assertDef = builder.getBeanDefinition(); - String assertName = parserContext.getReaderContext().registerWithGeneratedName(assertDef); - compositeDef.addNestedComponent(new BeanComponentDefinition(assertDef, assertName)); - } - -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java index fb2ab93ccc..62a4a1082d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoNamespaceHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,16 +26,11 @@ */ public class MongoNamespaceHandler extends NamespaceHandlerSupport { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.NamespaceHandler#init() - */ public void init() { registerBeanDefinitionParser("mapping-converter", new MappingMongoConverterParser()); registerBeanDefinitionParser("mongo-client", new MongoClientParser()); registerBeanDefinitionParser("db-factory", new MongoDbFactoryParser()); - registerBeanDefinitionParser("jmx", new MongoJmxParser()); registerBeanDefinitionParser("auditing", new MongoAuditingBeanDefinitionParser()); registerBeanDefinitionParser("template", new MongoTemplateParser()); registerBeanDefinitionParser("gridFsTemplate", new GridFsTemplateParser()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java index 5caf989a70..00e993fdc8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoParsingUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,12 +19,16 @@ import java.util.Map; +import org.jspecify.annotations.NullUnmarked; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.config.CustomEditorConfigurer; import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionValidationException; import org.springframework.beans.factory.support.ManagedMap; import org.springframework.beans.factory.xml.BeanDefinitionParser; -import org.springframework.data.mongodb.core.MongoClientOptionsFactoryBean; +import org.springframework.data.mongodb.core.MongoClientSettingsFactoryBean; +import org.springframework.data.mongodb.core.MongoServerApiFactoryBean; +import org.springframework.util.StringUtils; import org.springframework.util.xml.DomUtils; import org.w3c.dom.Element; @@ -35,66 +39,100 @@ * @author Oliver Gierke * @author Thomas Darimont * @author Christoph Strobl + * @author Mark Paluch */ -@SuppressWarnings("deprecation") +@NullUnmarked abstract class MongoParsingUtils { private MongoParsingUtils() {} /** - * Parses the mongo replica-set element. - * - * @param parserContext the parser context - * @param element the mongo element - * @param mongoBuilder the bean definition builder to populate - * @return - */ - static void parseReplicaSet(Element element, BeanDefinitionBuilder mongoBuilder) { - setPropertyValue(mongoBuilder, element, "replica-set", "replicaSetSeeds"); - } - - /** - * Parses the {@code mongo:client-options} sub-element. Populates the given attribute factory with the proper + * Parses the {@code mongo:client-settings} sub-element. Populates the given attribute factory with the proper * attributes. 
* - * @param element must not be {@literal null}. - * @param mongoClientBuilder must not be {@literal null}. + * @param element + * @param mongoClientBuilder * @return - * @since 1.7 + * @since 3.0 */ - public static boolean parseMongoClientOptions(Element element, BeanDefinitionBuilder mongoClientBuilder) { - - Element optionsElement = DomUtils.getChildElementByTagName(element, "client-options"); + public static boolean parseMongoClientSettings(Element element, BeanDefinitionBuilder mongoClientBuilder) { - if (optionsElement == null) { + Element settingsElement = DomUtils.getChildElementByTagName(element, "client-settings"); + if (settingsElement == null) { return false; } BeanDefinitionBuilder clientOptionsDefBuilder = BeanDefinitionBuilder - .genericBeanDefinition(MongoClientOptionsFactoryBean.class); - - setPropertyValue(clientOptionsDefBuilder, optionsElement, "description", "description"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-connections-per-host", "minConnectionsPerHost"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "connections-per-host", "connectionsPerHost"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "threads-allowed-to-block-for-connection-multiplier", - "threadsAllowedToBlockForConnectionMultiplier"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-wait-time", "maxWaitTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-idle-time", "maxConnectionIdleTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "max-connection-life-time", "maxConnectionLifeTime"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "connect-timeout", "connectTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-timeout", "socketTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "socket-keep-alive", "socketKeepAlive"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "read-preference", 
"readPreference"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "write-concern", "writeConcern"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-frequency", "heartbeatFrequency"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "min-heartbeat-frequency", "minHeartbeatFrequency"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-connect-timeout", "heartbeatConnectTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "heartbeat-socket-timeout", "heartbeatSocketTimeout"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "ssl", "ssl"); - setPropertyReference(clientOptionsDefBuilder, optionsElement, "ssl-socket-factory-ref", "sslSocketFactory"); - setPropertyValue(clientOptionsDefBuilder, optionsElement, "server-selection-timeout", "serverSelectionTimeout"); - - mongoClientBuilder.addPropertyValue("mongoClientOptions", clientOptionsDefBuilder.getBeanDefinition()); + .genericBeanDefinition(MongoClientSettingsFactoryBean.class); + + setPropertyValue(clientOptionsDefBuilder, settingsElement, "application-name", "applicationName"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-preference", "readPreference"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "read-concern", "readConcern"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "write-concern", "writeConcern"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-reads", "retryReads"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "retry-writes", "retryWrites"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "uuid-representation", "uUidRepresentation"); + + // SocketSettings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-connect-timeout", "socketConnectTimeoutMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-read-timeout", "socketReadTimeoutMS"); + 
setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-receive-buffer-size", "socketReceiveBufferSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "socket-send-buffer-size", "socketSendBufferSize"); + + // Server Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-heartbeat-frequency", + "serverHeartbeatFrequencyMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "server-min-heartbeat-frequency", + "serverMinHeartbeatFrequencyMS"); + + // Cluster Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-srv-host", "clusterSrvHost"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-hosts", "clusterHosts"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-connection-mode", "clusterConnectionMode"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-type", "custerRequiredClusterType"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-local-threshold", "clusterLocalThresholdMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "cluster-server-selection-timeout", + "clusterServerSelectionTimeoutMS"); + + // Connection Pool Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-size", "poolMaxSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-min-size", "poolMinSize"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-wait-time", "poolMaxWaitTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-life-time", + "poolMaxConnectionLifeTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-max-connection-idle-time", + "poolMaxConnectionIdleTimeMS"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-initial-delay", + "poolMaintenanceInitialDelayMS"); + 
setPropertyValue(clientOptionsDefBuilder, settingsElement, "connection-pool-maintenance-frequency", + "poolMaintenanceFrequencyMS"); + + // SSL Settings + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-enabled", "sslEnabled"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-invalid-host-name-allowed", + "sslInvalidHostNameAllowed"); + setPropertyValue(clientOptionsDefBuilder, settingsElement, "ssl-provider", "sslProvider"); + + // Field level encryption + setPropertyReference(clientOptionsDefBuilder, settingsElement, "encryption-settings-ref", "autoEncryptionSettings"); + + // ServerAPI + if (StringUtils.hasText(settingsElement.getAttribute("server-api-version"))) { + + MongoServerApiFactoryBean serverApiFactoryBean = new MongoServerApiFactoryBean(); + serverApiFactoryBean.setVersion(settingsElement.getAttribute("server-api-version")); + try { + clientOptionsDefBuilder.addPropertyValue("serverApi", serverApiFactoryBean.getObject()); + } catch (Exception exception) { + throw new BeanDefinitionValidationException("Non parsable server-api.", exception); + } + } else { + setPropertyReference(clientOptionsDefBuilder, settingsElement, "server-api-ref", "serverApi"); + } + + // and the rest + + mongoClientBuilder.addPropertyValue("mongoClientSettings", clientOptionsDefBuilder.getBeanDefinition()); return true; } @@ -116,6 +154,24 @@ static BeanDefinitionBuilder getWriteConcernPropertyEditorBuilder() { return builder; } + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link ReadConcernPropertyEditor}. 
+ * + * @return + * @since 3.0 + */ + static BeanDefinitionBuilder getReadConcernPropertyEditorBuilder() { + + Map> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ReadConcern", ReadConcernPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + /** * One should only register one bean definition but want to have the convenience of using * AbstractSingleBeanDefinitionParser but have the side effect of registering a 'default' property editor with the @@ -123,7 +179,7 @@ static BeanDefinitionBuilder getWriteConcernPropertyEditorBuilder() { */ static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() { - Map customEditors = new ManagedMap(); + Map customEditors = new ManagedMap<>(); customEditors.put("com.mongodb.ServerAddress[]", "org.springframework.data.mongodb.config.ServerAddressPropertyEditor"); @@ -141,7 +197,7 @@ static BeanDefinitionBuilder getServerAddressPropertyEditorBuilder() { */ static BeanDefinitionBuilder getReadPreferencePropertyEditorBuilder() { - Map> customEditors = new ManagedMap>(); + Map> customEditors = new ManagedMap<>(); customEditors.put("com.mongodb.ReadPreference", ReadPreferencePropertyEditor.class); BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); @@ -167,4 +223,41 @@ static BeanDefinitionBuilder getMongoCredentialPropertyEditor() { return builder; } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link ConnectionStringPropertyEditor}. 
+ * + * @return + * @since 3.0 + */ + static BeanDefinitionBuilder getConnectionStringPropertyEditorBuilder() { + + Map<String, Class<?>> customEditors = new ManagedMap<>(); + customEditors.put("com.mongodb.ConnectionString", ConnectionStringPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + + /** + * Returns the {@link BeanDefinitionBuilder} to build a {@link BeanDefinition} for a + * {@link UUidRepresentationPropertyEditor}. + * + * @return + * @since 3.0 + */ + static BeanDefinitionBuilder getUUidRepresentationEditorBuilder() { + + Map<String, Class<?>> customEditors = new ManagedMap<>(); + customEditors.put("org.bson.UuidRepresentation", UUidRepresentationPropertyEditor.class); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.genericBeanDefinition(CustomEditorConfigurer.class); + builder.addPropertyValue("customEditors", customEditors); + + return builder; + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java index 96763fd2d9..5053e540fe 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/MongoTemplateParser.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import static org.springframework.data.config.ParsingUtils.*; import static org.springframework.data.mongodb.config.MongoParsingUtils.*; +import org.jspecify.annotations.NullUnmarked; import org.springframework.beans.factory.BeanDefinitionStoreException; import org.springframework.beans.factory.config.BeanDefinition; import org.springframework.beans.factory.parsing.BeanComponentDefinition; @@ -37,12 +38,9 @@ * @author Martin Baumgartner * @author Oliver Gierke */ +@NullUnmarked class MongoTemplateParser extends AbstractBeanDefinitionParser { - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#resolveId(org.w3c.dom.Element, org.springframework.beans.factory.support.AbstractBeanDefinition, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected String resolveId(Element element, AbstractBeanDefinition definition, ParserContext parserContext) throws BeanDefinitionStoreException { @@ -51,10 +49,6 @@ protected String resolveId(Element element, AbstractBeanDefinition definition, P return StringUtils.hasText(id) ? 
id : BeanNames.MONGO_TEMPLATE_BEAN_NAME; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.xml.AbstractBeanDefinitionParser#parseInternal(org.w3c.dom.Element, org.springframework.beans.factory.xml.ParserContext) - */ @Override protected AbstractBeanDefinition parseInternal(Element element, ParserContext parserContext) { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java new file mode 100644 index 0000000000..e46701a7f3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/PersistentEntitiesFactoryBean.java @@ -0,0 +1,53 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import org.springframework.beans.factory.FactoryBean; +import org.springframework.data.mapping.context.PersistentEntities; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; + +/** + * Simple helper to be able to wire the {@link PersistentEntities} from a {@link MappingMongoConverter} bean available + * in the application context. 
+ * + * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + * @since 3.1 + */ +public class PersistentEntitiesFactoryBean implements FactoryBean { + + private final MappingMongoConverter converter; + + /** + * Creates a new {@link PersistentEntitiesFactoryBean} for the given {@link MappingMongoConverter}. + * + * @param converter must not be {@literal null}. + */ + public PersistentEntitiesFactoryBean(MappingMongoConverter converter) { + this.converter = converter; + } + + @Override + public PersistentEntities getObject() { + return PersistentEntities.of(converter.getMappingContext()); + } + + @Override + public Class getObjectType() { + return PersistentEntities.class; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java new file mode 100644 index 0000000000..80cf404434 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReactiveMongoAuditingRegistrar.java @@ -0,0 +1,81 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import java.lang.annotation.Annotation; + +import org.springframework.beans.factory.config.BeanDefinition; +import org.springframework.beans.factory.support.BeanDefinitionBuilder; +import org.springframework.beans.factory.support.BeanDefinitionRegistry; +import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; +import org.springframework.data.auditing.ReactiveIsNewAwareAuditingHandler; +import org.springframework.data.auditing.config.AuditingBeanDefinitionRegistrarSupport; +import org.springframework.data.auditing.config.AuditingConfiguration; +import org.springframework.data.config.ParsingUtils; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAuditingEntityCallback; +import org.springframework.util.Assert; + +/** + * {@link ImportBeanDefinitionRegistrar} to enable {@link EnableReactiveMongoAuditing} annotation. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 3.1 + */ +class ReactiveMongoAuditingRegistrar extends AuditingBeanDefinitionRegistrarSupport { + + @Override + protected Class getAnnotation() { + return EnableReactiveMongoAuditing.class; + } + + @Override + protected String getAuditingHandlerBeanName() { + return "reactiveMongoAuditingHandler"; + } + + @Override + protected void postProcess(BeanDefinitionBuilder builder, AuditingConfiguration configuration, + BeanDefinitionRegistry registry) { + builder.setFactoryMethod("from").addConstructorArgReference("mongoMappingContext"); + } + + @Override + protected BeanDefinitionBuilder getAuditHandlerBeanDefinitionBuilder(AuditingConfiguration configuration) { + + Assert.notNull(configuration, "AuditingConfiguration must not be null"); + + return configureDefaultAuditHandlerAttributes(configuration, + BeanDefinitionBuilder.rootBeanDefinition(ReactiveIsNewAwareAuditingHandler.class)); + } + + @Override + protected void registerAuditListenerBeanDefinition(BeanDefinition auditingHandlerDefinition, + 
BeanDefinitionRegistry registry) { + + Assert.notNull(auditingHandlerDefinition, "BeanDefinition must not be null"); + Assert.notNull(registry, "BeanDefinitionRegistry must not be null"); + + BeanDefinitionBuilder builder = BeanDefinitionBuilder.rootBeanDefinition(ReactiveAuditingEntityCallback.class); + + builder.addConstructorArgValue(ParsingUtils.getObjectFactoryBeanDefinition(getAuditingHandlerBeanName(), registry)); + builder.getRawBeanDefinition().setSource(auditingHandlerDefinition.getSource()); + + registerInfrastructureBeanWithId(builder.getBeanDefinition(), ReactiveAuditingEntityCallback.class.getName(), + registry); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java new file mode 100644 index 0000000000..3f5cb0ca62 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadConcernPropertyEditor.java @@ -0,0 +1,44 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.StringUtils; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadConcernLevel; + +/** + * Parse a {@link String} to a {@link ReadConcern}. Well-known values are resolved via + * {@link ReadConcernLevel#fromString(String)}. + * + * @author Christoph Strobl + * @since 3.0 + */ +public class ReadConcernPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String readConcernString) { + + if (!StringUtils.hasText(readConcernString)) { + return; + } + + setValue(new ReadConcern(ReadConcernLevel.fromString(readConcernString))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java index dbbe7aadfd..f24c435348 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ReadPreferencePropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,10 +17,10 @@ import java.beans.PropertyEditorSupport; -import org.springframework.lang.Nullable; - import com.mongodb.ReadPreference; +import org.jspecify.annotations.Nullable; + /** * Parse a {@link String} to a {@link ReadPreference}. * @@ -29,10 +29,6 @@ */ public class ReadPreferencePropertyEditor extends PropertyEditorSupport { - /* - * (non-Javadoc) - * @see java.beans.PropertyEditorSupport#setAsText(java.lang.String) - */ @Override public void setAsText(@Nullable String readPreferenceString) throws IllegalArgumentException { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java index c2ba9a675d..9ff59e5b22 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/ServerAddressPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,9 +21,9 @@ import java.util.HashSet; import java.util.Set; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.lang.Nullable; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -43,13 +43,9 @@ public class ServerAddressPropertyEditor extends PropertyEditorSupport { * A port is a number without a leading 0 at the end of the address that is proceeded by just a single :. */ private static final String HOST_PORT_SPLIT_PATTERN = "(? 2) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source); + if (LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "source", source)); + } return null; } @@ -105,9 +104,13 @@ private ServerAddress parseServerAddress(String source) { return port == null ? 
new ServerAddress(hostAddress) : new ServerAddress(hostAddress, port); } catch (UnknownHostException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0]); + if (LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "host", hostAndPort[0])); + } } catch (NumberFormatException e) { - LOG.warn(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1]); + if (LOG.isWarnEnabled()) { + LOG.warn(String.format(COULD_NOT_PARSE_ADDRESS_MESSAGE, "port", hostAndPort[1])); + } } return null; @@ -121,7 +124,7 @@ private ServerAddress parseServerAddress(String source) { */ private String[] extractHostAddressAndPort(String addressAndPortSource) { - Assert.notNull(addressAndPortSource, "Address and port source must not be null!"); + Assert.notNull(addressAndPortSource, "Address and port source must not be null"); String[] hostAndPort = addressAndPortSource.split(HOST_PORT_SPLIT_PATTERN); String hostAddress = hostAndPort[0]; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java index a3583df058..9f579b8fe9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/StringToWriteConcernConverter.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -26,10 +26,6 @@ */ public class StringToWriteConcernConverter implements Converter { - /* - * (non-Javadoc) - * @see org.springframework.core.convert.converter.Converter#convert(java.lang.Object) - */ public WriteConcern convert(String source) { WriteConcern writeConcern = WriteConcern.valueOf(source); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java new file mode 100644 index 0000000000..23c15102ac --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/UUidRepresentationPropertyEditor.java @@ -0,0 +1,41 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.config; + +import java.beans.PropertyEditorSupport; + +import org.bson.UuidRepresentation; +import org.jspecify.annotations.Nullable; +import org.springframework.util.StringUtils; + +/** + * Parse a {@link String} to a {@link UuidRepresentation}. 
+ * + * @author Christoph Strobl + * @since 3.0 + */ +public class UUidRepresentationPropertyEditor extends PropertyEditorSupport { + + @Override + public void setAsText(@Nullable String value) { + + if (!StringUtils.hasText(value)) { + return; + } + + setValue(UuidRepresentation.valueOf(value)); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java index f8c26ece38..32c19e24c3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/WriteConcernPropertyEditor.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,7 @@ import java.beans.PropertyEditorSupport; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; import org.springframework.util.StringUtils; import com.mongodb.WriteConcern; @@ -34,7 +34,7 @@ public class WriteConcernPropertyEditor extends PropertyEditorSupport { /** - * Parse a string to a List + * Parse a string to a {@link WriteConcern}. 
*/ @Override public void setAsText(@Nullable String writeConcernString) { @@ -51,6 +51,5 @@ public void setAsText(@Nullable String writeConcernString) { // pass on the string to the constructor setValue(new WriteConcern(writeConcernString)); } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java index 5a1e5b725e..555cc9f66e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/config/package-info.java @@ -1,6 +1,6 @@ /** * Spring XML namespace configuration for MongoDB specific repositories. */ -@org.springframework.lang.NonNullApi +@org.jspecify.annotations.NullMarked package org.springframework.data.mongodb.config; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java index edddf85a7a..ec7c368eaf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/AggregationUtil.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,31 +15,21 @@ */ package org.springframework.data.mongodb.core; -import lombok.AllArgsConstructor; - -import java.util.Arrays; -import java.util.Collections; import java.util.List; -import java.util.Optional; -import java.util.stream.Collectors; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mongodb.core.aggregation.Aggregation; -import org.springframework.data.mongodb.core.aggregation.AggregationOperation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; -import org.springframework.data.mongodb.core.aggregation.AggregationOptions; -import org.springframework.data.mongodb.core.aggregation.CountOperation; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.DomainTypeMapping; +import org.springframework.data.mongodb.core.aggregation.FieldLookupPolicy; import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.query.CriteriaDefinition; -import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; -import org.springframework.util.Assert; -import org.springframework.util.ObjectUtils; +import org.springframework.data.util.Lazy; /** * Utility methods to map {@link 
org.springframework.data.mongodb.core.aggregation.Aggregation} pipeline definitions and @@ -49,34 +39,41 @@ * @author Mark Paluch * @since 2.1 */ -@AllArgsConstructor class AggregationUtil { - QueryMapper queryMapper; - MappingContext, MongoPersistentProperty> mappingContext; + final QueryMapper queryMapper; + final MappingContext, MongoPersistentProperty> mappingContext; + final Lazy untypedMappingContext; - /** - * Prepare the {@link AggregationOperationContext} for a given aggregation by either returning the context itself it - * is not {@literal null}, create a {@link TypeBasedAggregationOperationContext} if the aggregation contains type - * information (is a {@link TypedAggregation}) or use the {@link Aggregation#DEFAULT_CONTEXT}. - * - * @param aggregation must not be {@literal null}. - * @param context can be {@literal null}. - * @return the root {@link AggregationOperationContext} to use. - */ - AggregationOperationContext prepareAggregationContext(Aggregation aggregation, - @Nullable AggregationOperationContext context) { + AggregationUtil(QueryMapper queryMapper, + MappingContext, MongoPersistentProperty> mappingContext) { + + this.queryMapper = queryMapper; + this.mappingContext = mappingContext; + this.untypedMappingContext = Lazy.of(() -> new TypeBasedAggregationOperationContext(Object.class, mappingContext, + queryMapper, FieldLookupPolicy.relaxed())); + } + + AggregationOperationContext createAggregationContext(Aggregation aggregation, @Nullable Class inputType) { + + DomainTypeMapping domainTypeMapping = aggregation.getOptions().getDomainTypeMapping(); + + if (domainTypeMapping == DomainTypeMapping.NONE) { + return Aggregation.DEFAULT_CONTEXT; + } - if (context != null) { - return context; + FieldLookupPolicy lookupPolicy = domainTypeMapping == DomainTypeMapping.STRICT + && !aggregation.getPipeline().containsUnionWith() ? 
FieldLookupPolicy.strict() : FieldLookupPolicy.relaxed(); + + if (aggregation instanceof TypedAggregation ta) { + return new TypeBasedAggregationOperationContext(ta.getInputType(), mappingContext, queryMapper, lookupPolicy); } - if (aggregation instanceof TypedAggregation) { - return new TypeBasedAggregationOperationContext(((TypedAggregation) aggregation).getInputType(), mappingContext, - queryMapper); + if (inputType == null) { + return untypedMappingContext.get(); } - return Aggregation.DEFAULT_CONTEXT; + return new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper, lookupPolicy); } /** @@ -87,12 +84,7 @@ AggregationOperationContext prepareAggregationContext(Aggregation aggregation, * @return */ List createPipeline(Aggregation aggregation, AggregationOperationContext context) { - - if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) { - return aggregation.toPipeline(context); - } - - return mapAggregationPipeline(aggregation.toPipeline(context)); + return aggregation.toPipeline(context); } /** @@ -103,68 +95,7 @@ List createPipeline(Aggregation aggregation, AggregationOperationConte * @return */ Document createCommand(String collection, Aggregation aggregation, AggregationOperationContext context) { - - Document command = aggregation.toDocument(collection, context); - - if (!ObjectUtils.nullSafeEquals(context, Aggregation.DEFAULT_CONTEXT)) { - return command; - } - - command.put("pipeline", mapAggregationPipeline(command.get("pipeline", List.class))); - - return command; + return aggregation.toDocument(collection, context); } - /** - * Create a {@code $count} aggregation for {@link Query} and optionally a {@link Class entity class}. - * - * @param query must not be {@literal null}. - * @param entityClass can be {@literal null} if the {@link Query} object is empty. - * @return the {@link Aggregation} pipeline definition to run a {@code $count} aggregation. 
- */ - Aggregation createCountAggregation(Query query, @Nullable Class entityClass) { - - List pipeline = computeCountAggregationPipeline(query, entityClass); - - Aggregation aggregation = entityClass != null ? Aggregation.newAggregation(entityClass, pipeline) - : Aggregation.newAggregation(pipeline); - aggregation.withOptions(AggregationOptions.builder().collation(query.getCollation().orElse(null)).build()); - - return aggregation; - } - - private List computeCountAggregationPipeline(Query query, @Nullable Class entityType) { - - CountOperation count = Aggregation.count().as("totalEntityCount"); - if (query.getQueryObject().isEmpty()) { - return Collections.singletonList(count); - } - - Assert.notNull(entityType, "Entity type must not be null!"); - - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), - mappingContext.getPersistentEntity(entityType)); - - CriteriaDefinition criteria = new CriteriaDefinition() { - - @Override - public Document getCriteriaObject() { - return mappedQuery; - } - - @Nullable - @Override - public String getKey() { - return null; - } - }; - - return Arrays.asList(Aggregation.match(criteria), count); - } - - private List mapAggregationPipeline(List pipeline) { - - return pipeline.stream().map(val -> queryMapper.getMappedObject(val, Optional.empty())) - .collect(Collectors.toList()); - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java index 179e1475bb..4820c2355c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,20 +17,36 @@ import java.util.List; +import org.springframework.data.domain.Sort; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.Pair; import com.mongodb.bulk.BulkWriteResult; /** - * Bulk operations for insert/update/remove actions on a collection. These bulks operation are available since MongoDB - * 2.6 and make use of low level bulk commands on the protocol level. This interface defines a fluent API to add - * multiple single operations or list of similar operations in sequence which can then eventually be executed by calling + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single + * operations or list of similar operations in sequence which can then eventually be executed by calling * {@link #execute()}. * + *

+ * MongoOperations ops = …;
+ *
+ * ops.bulkOps(BulkMode.UNORDERED, Person.class)
+ * 				.insert(newPerson)
+ * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
+ * 				.execute();
+ * 
+ *

+ * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * * @author Tobias Trelle * @author Oliver Gierke + * @author Minsu Kim * @since 1.9 */ public interface BulkOperations { @@ -45,7 +61,7 @@ enum BulkMode { /** Perform bulk operations in parallel. Processing will continue on errors. */ UNORDERED - }; + } /** * Add a single insert to the bulk operation. @@ -66,11 +82,25 @@ enum BulkMode { /** * Add a single update to the bulk operation. For the update request, only the first matching document is updated. * - * @param query update criteria, must not be {@literal null}. + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link Update} operation to perform, must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + */ + default BulkOperations updateOne(Query query, Update update) { + return updateOne(query, (UpdateDefinition) update); + } + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. * @param update {@link Update} operation to perform, must not be {@literal null}. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. 
+ * @since 4.1 */ - BulkOperations updateOne(Query query, Update update); + BulkOperations updateOne(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, only the first matching document is updated. @@ -78,7 +108,18 @@ enum BulkMode { * @param updates Update operations to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateOne(List> updates); + BulkOperations updateOne(List> updates); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + */ + default BulkOperations updateMulti(Query query, Update update) { + return updateMulti(query, (UpdateDefinition) update); + } /** * Add a single update to the bulk operation. For the update request, all matching documents are updated. @@ -86,17 +127,17 @@ enum BulkMode { * @param query Update criteria. * @param update Update operation to perform. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 */ - BulkOperations updateMulti(Query query, Update update); + BulkOperations updateMulti(Query query, UpdateDefinition update); /** * Add a list of updates to the bulk operation. For each update request, all matching documents are updated. * * @param updates Update operations to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations updateMulti(List> updates); + BulkOperations updateMulti(List> updates); /** * Add a single upsert to the bulk operation. 
An upsert is an update if the set of matching documents is not empty, @@ -104,17 +145,28 @@ enum BulkMode { * * @param query Update criteria. * @param update Update operation to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ - BulkOperations upsert(Query query, Update update); + default BulkOperations upsert(Query query, Update update) { + return upsert(query, (UpdateDefinition) update); + } + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. + * @since 4.1 + */ + BulkOperations upsert(Query query, UpdateDefinition update); /** * Add a list of upserts to the bulk operation. An upsert is an update if the set of matching documents is not empty, * else an insert. * * @param updates Updates/insert operations to perform. - * @return The bulk operation. * @return the current {@link BulkOperations} instance with the update added, will never be {@literal null}. */ BulkOperations upsert(List> updates); @@ -135,6 +187,31 @@ enum BulkMode { */ BulkOperations remove(List removes); + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. 
+ * @since 2.2 + */ + default BulkOperations replaceOne(Query query, Object replacement) { + return replaceOne(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the current {@link BulkOperations} instance with the replacement added, will never be {@literal null}. + * @since 2.2 + */ + BulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options); + /** * Execute all bulk operations using the default write concern. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java new file mode 100644 index 0000000000..1f5509cd60 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/BulkOperationsSupport.java @@ -0,0 +1,243 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +import org.bson.Document; +import org.bson.conversions.Bson; +import org.springframework.context.ApplicationEvent; +import org.springframework.data.mapping.PersistentEntity; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.util.Assert; + +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOneModel; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.UpdateManyModel; +import com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.client.model.WriteModel; + +/** + * Support class for bulk operations. 
+ * + * @author Mark Paluch + * @since 4.1 + */ +abstract class BulkOperationsSupport { + + private final String collectionName; + + BulkOperationsSupport(String collectionName) { + + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + + this.collectionName = collectionName; + } + + /** + * Emit a {@link BeforeSaveEvent}. + * + * @param holder + */ + void maybeEmitBeforeSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new BeforeSaveEvent<>(holder.source(), target, collectionName)); + } + } + + /** + * Emit a {@link AfterSaveEvent}. + * + * @param holder + */ + void maybeEmitAfterSaveEvent(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + maybeEmitEvent(new AfterSaveEvent<>(holder.source(), target, collectionName)); + } + } + + WriteModel mapWriteModel(Object source, WriteModel writeModel) { + + if (writeModel instanceof UpdateOneModel model) { + + Bson sort = model.getOptions().getSort(); + if (sort instanceof Document sortDocument) { + model.getOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateOneModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new 
UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof UpdateManyModel model) { + + if (source instanceof AggregationUpdate aggregationUpdate) { + + List pipeline = mapUpdatePipeline(aggregationUpdate); + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), pipeline, model.getOptions()); + } + + return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), + model.getOptions()); + } + + if (writeModel instanceof DeleteOneModel model) { + return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof DeleteManyModel model) { + return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions()); + } + + if (writeModel instanceof ReplaceOneModel model) { + + Bson sort = model.getReplaceOptions().getSort(); + + if (sort instanceof Document sortDocument) { + model.getReplaceOptions().sort(updateMapper().getMappedSort(sortDocument, entity().orElse(null))); + } + return new ReplaceOneModel<>(getMappedQuery(model.getFilter()), model.getReplacement(), + model.getReplaceOptions()); + } + + return writeModel; + } + + private List mapUpdatePipeline(AggregationUpdate source) { + + Class type = entity().isPresent() ? entity().map(PersistentEntity::getType).get() : Object.class; + AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, + updateMapper().getMappingContext(), queryMapper()); + + return new AggregationUtil(queryMapper(), queryMapper().getMappingContext()).createPipeline(source, context); + } + + /** + * Emit a {@link ApplicationEvent} if event multicasting is enabled. + * + * @param event + */ + protected abstract void maybeEmitEvent(ApplicationEvent event); + + /** + * @return the {@link UpdateMapper} to use. + */ + protected abstract UpdateMapper updateMapper(); + + /** + * @return the {@link QueryMapper} to use. 
+ */ + protected abstract QueryMapper queryMapper(); + + /** + * @return the associated {@link PersistentEntity}. Can be {@link Optional#empty()}. + */ + protected abstract Optional> entity(); + + protected Bson getMappedUpdate(Bson update) { + return updateMapper().getMappedObject(update, entity()); + } + + protected Bson getMappedQuery(Bson query) { + return queryMapper().getMappedObject(query, entity()); + } + + protected static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) { + + BulkWriteOptions options = new BulkWriteOptions(); + + return switch (bulkMode) { + case ORDERED -> options.ordered(true); + case UNORDERED -> options.ordered(false); + }; + } + + /** + * @param filterQuery The {@link Query} to read a potential {@link Collation} from. Must not be {@literal null}. + * @param update The {@link Update} to apply + * @param upsert flag to indicate if document should be upserted. + * @param multi flag to indicate if update might affect multiple documents. + * @return new instance of {@link UpdateOptions}. + */ + protected UpdateOptions computeUpdateOptions(Query filterQuery, UpdateDefinition update, boolean upsert, + boolean multi) { + + UpdateOptions options = new UpdateOptions(); + options.upsert(upsert); + + if (update.hasArrayFilters()) { + List list = new ArrayList<>(update.getArrayFilters().size()); + for (ArrayFilter arrayFilter : update.getArrayFilters()) { + list.add(arrayFilter.asDocument()); + } + options.arrayFilters(list); + } + + if (!multi && filterQuery.isSorted()) { + options.sort(filterQuery.getSortObject()); + } + + filterQuery.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + return options; + } + + /** + * Value object chaining together an actual source with its {@link WriteModel} representation. 
+ * + * @author Christoph Strobl + */ + record SourceAwareWriteModelHolder(Object source, WriteModel model) { + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java index b25b3eb4fb..8a74ace28b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamEvent.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,17 @@ */ package org.springframework.data.mongodb.core; -import lombok.EqualsAndHashCode; - import java.time.Instant; import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; +import org.bson.BsonTimestamp; import org.bson.BsonValue; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.messaging.Message; -import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import com.mongodb.client.model.changestream.ChangeStreamDocument; import com.mongodb.client.model.changestream.OperationType; @@ -36,22 +36,29 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi * @since 2.1 */ -@EqualsAndHashCode public class ChangeStreamEvent { 
@SuppressWarnings("rawtypes") // - private static final AtomicReferenceFieldUpdater CONVERTED_UPDATER = AtomicReferenceFieldUpdater - .newUpdater(ChangeStreamEvent.class, Object.class, "converted"); + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocument"); + + @SuppressWarnings("rawtypes") // + private static final AtomicReferenceFieldUpdater CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER = AtomicReferenceFieldUpdater + .newUpdater(ChangeStreamEvent.class, Object.class, "convertedFullDocumentBeforeChange"); private final @Nullable ChangeStreamDocument raw; private final Class targetType; private final MongoConverter converter; - // accessed through CONVERTED_UPDATER. - private volatile @Nullable T converted; + // accessed through CONVERTED_FULL_DOCUMENT_UPDATER. + private volatile @Nullable T convertedFullDocument; + + // accessed through CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER. + private volatile @Nullable T convertedFullDocumentBeforeChange; /** * @param raw can be {@literal null}. @@ -71,8 +78,7 @@ public ChangeStreamEvent(@Nullable ChangeStreamDocument raw, Class * * @return can be {@literal null}. */ - @Nullable - public ChangeStreamDocument getRaw() { + public @Nullable ChangeStreamDocument getRaw() { return raw; } @@ -81,9 +87,22 @@ public ChangeStreamDocument getRaw() { * * @return can be {@literal null}. */ + public @Nullable Instant getTimestamp() { + + return getBsonTimestamp() != null && raw != null + ? converter.getConversionService().convert(raw.getClusterTime(), Instant.class) + : null; + } + + /** + * Get the {@link ChangeStreamDocument#getClusterTime() cluster time}. + * + * @return can be {@literal null}. + * @since 2.2 + */ @Nullable - public Instant getTimestamp() { - return raw != null && raw.getClusterTime() != null ? 
Instant.ofEpochMilli(raw.getClusterTime().getValue()) : null; + public BsonTimestamp getBsonTimestamp() { + return raw != null ? raw.getClusterTime() : null; } /** @@ -91,8 +110,7 @@ public Instant getTimestamp() { * * @return can be {@literal null}. */ - @Nullable - public BsonValue getResumeToken() { + public @Nullable BsonValue getResumeToken() { return raw != null ? raw.getResumeToken() : null; } @@ -101,8 +119,7 @@ public BsonValue getResumeToken() { * * @return can be {@literal null}. */ - @Nullable - public OperationType getOperationType() { + public @Nullable OperationType getOperationType() { return raw != null ? raw.getOperationType() : null; } @@ -111,8 +128,7 @@ public OperationType getOperationType() { * * @return can be {@literal null}. */ - @Nullable - public String getDatabaseName() { + public @Nullable String getDatabaseName() { return raw != null ? raw.getNamespace().getDatabaseName() : null; } @@ -121,8 +137,7 @@ public String getDatabaseName() { * * @return can be {@literal null}. */ - @Nullable - public String getCollectionName() { + public @Nullable String getCollectionName() { return raw != null ? raw.getNamespace().getCollectionName() : null; } @@ -132,30 +147,46 @@ public String getCollectionName() { * @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocument()} is * {@literal null}. */ - @Nullable - public T getBody() { + public @Nullable T getBody() { - if (raw == null) { + if (raw == null || raw.getFullDocument() == null) { return null; } - Document fullDocument = raw.getFullDocument(); + return getConvertedFullDocument(raw.getFullDocument()); + } + + /** + * Get the potentially converted {@link ChangeStreamDocument#getFullDocumentBeforeChange() document} before being + * changed. + * + * @return {@literal null} when {@link #getRaw()} or {@link ChangeStreamDocument#getFullDocumentBeforeChange()} is + * {@literal null}. 
+ * @since 4.0 + */ + public @Nullable T getBodyBeforeChange() { - if (fullDocument == null) { - return targetType.cast(fullDocument); + if (raw == null || raw.getFullDocumentBeforeChange() == null) { + return null; } - return getConverted(fullDocument); + return getConvertedFullDocumentBeforeChange(raw.getFullDocumentBeforeChange()); } @SuppressWarnings("unchecked") - private T getConverted(Document fullDocument) { - return (T) doGetConverted(fullDocument); + private T getConvertedFullDocumentBeforeChange(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_BEFORE_CHANGE_UPDATER); } - private Object doGetConverted(Document fullDocument) { + @SuppressWarnings("unchecked") + private T getConvertedFullDocument(Document fullDocument) { + return (T) doGetConverted(fullDocument, CONVERTED_FULL_DOCUMENT_UPDATER); + } + + @SuppressWarnings("NullAway") + private Object doGetConverted(Document fullDocument, AtomicReferenceFieldUpdater updater) { - Object result = CONVERTED_UPDATER.get(this); + Object result = updater.get(this); if (result != null) { return result; @@ -164,25 +195,44 @@ private Object doGetConverted(Document fullDocument) { if (ClassUtils.isAssignable(Document.class, fullDocument.getClass())) { result = converter.read(targetType, fullDocument); - return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this); + return updater.compareAndSet(this, null, result) ? result : updater.get(this); } if (converter.getConversionService().canConvert(fullDocument.getClass(), targetType)) { result = converter.getConversionService().convert(fullDocument, targetType); - return CONVERTED_UPDATER.compareAndSet(this, null, result) ? result : CONVERTED_UPDATER.get(this); + return updater.compareAndSet(this, null, result) ? 
result : updater.get(this); } - throw new IllegalArgumentException(String.format("No converter found capable of converting %s to %s", - fullDocument.getClass(), targetType)); + throw new IllegalArgumentException( + String.format("No converter found capable of converting %s to %s", fullDocument.getClass(), targetType)); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return "ChangeStreamEvent {" + "raw=" + raw + ", targetType=" + targetType + '}'; } + + @Override + public boolean equals(@Nullable Object o) { + + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ChangeStreamEvent that = (ChangeStreamEvent) o; + + if (!ObjectUtils.nullSafeEquals(this.raw, that.raw)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.targetType, that.targetType); + } + + @Override + public int hashCode() { + int result = raw != null ? raw.hashCode() : 0; + result = 31 * result + ObjectUtils.nullSafeHashCode(targetType); + return result; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java index 42b0de8c1a..9c99b0e01f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ChangeStreamOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,21 +15,25 @@ */ package org.springframework.data.mongodb.core; -import lombok.EqualsAndHashCode; - import java.time.Instant; import java.util.Arrays; import java.util.Optional; +import org.bson.BsonDocument; +import org.bson.BsonTimestamp; import org.bson.BsonValue; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.ObjectUtils; import com.mongodb.client.model.changestream.ChangeStreamDocument; import com.mongodb.client.model.changestream.FullDocument; +import com.mongodb.client.model.changestream.FullDocumentBeforeChange; /** * Options applicable to MongoDB Change Streams. Intended @@ -38,16 +42,18 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Myroslav Kosinskyi * @since 2.1 */ -@EqualsAndHashCode public class ChangeStreamOptions { private @Nullable Object filter; private @Nullable BsonValue resumeToken; private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; private @Nullable Collation collation; - private @Nullable Instant resumeTimestamp; + private @Nullable Object resumeTimestamp; + private Resume resume = Resume.UNDEFINED; protected ChangeStreamOptions() {} @@ -72,6 +78,14 @@ public Optional getFullDocumentLookup() { return Optional.ofNullable(fullDocumentLookup); } + /** + * @return {@link Optional#empty()} if not set. 
+ * @since 4.0 + */ + public Optional getFullDocumentBeforeChangeLookup() { + return Optional.ofNullable(fullDocumentBeforeChangeLookup); + } + /** * @return {@link Optional#empty()} if not set. */ @@ -83,7 +97,31 @@ public Optional getCollation() { * @return {@link Optional#empty()} if not set. */ public Optional getResumeTimestamp() { - return Optional.ofNullable(resumeTimestamp); + return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, Instant.class)); + } + + /** + * @return {@link Optional#empty()} if not set. + * @since 2.2 + */ + public Optional getResumeBsonTimestamp() { + return Optional.ofNullable(resumeTimestamp).map(timestamp -> asTimestampOfType(timestamp, BsonTimestamp.class)); + } + + /** + * @return {@literal true} if the change stream should be started after the {@link #getResumeToken() token}. + * @since 2.2 + */ + public boolean isStartAfter() { + return Resume.START_AFTER.equals(resume); + } + + /** + * @return {@literal true} if the change stream should be resumed after the {@link #getResumeToken() token}. + * @since 2.2 + */ + public boolean isResumeAfter() { + return Resume.RESUME_AFTER.equals(resume); } /** @@ -95,7 +133,7 @@ public static ChangeStreamOptions empty() { /** * Obtain a shiny new {@link ChangeStreamOptionsBuilder} and start defining options in this fancy fluent way. Just - * don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when your're done. + * don't forget to call {@link ChangeStreamOptionsBuilder#build() build()} when done. * * @return new instance of {@link ChangeStreamOptionsBuilder}. 
*/ @@ -103,6 +141,90 @@ public static ChangeStreamOptionsBuilder builder() { return new ChangeStreamOptionsBuilder(); } + private static T asTimestampOfType(Object timestamp, Class targetType) { + return targetType.cast(doGetTimestamp(timestamp, targetType)); + } + + private static Object doGetTimestamp(Object timestamp, Class targetType) { + + if (ClassUtils.isAssignableValue(targetType, timestamp)) { + return timestamp; + } + + if (timestamp instanceof Instant instant) { + return new BsonTimestamp((int) instant.getEpochSecond(), 0); + } + + if (timestamp instanceof BsonTimestamp bsonTimestamp) { + return Instant.ofEpochSecond(bsonTimestamp.getTime()); + } + + throw new IllegalArgumentException( + "o_O that should actually not happen; The timestamp should be an Instant or a BsonTimestamp but was " + + ObjectUtils.nullSafeClassName(timestamp)); + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ChangeStreamOptions that = (ChangeStreamOptions) o; + + if (!ObjectUtils.nullSafeEquals(this.filter, that.filter)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.resumeToken, that.resumeToken)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.fullDocumentLookup, that.fullDocumentLookup)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.fullDocumentBeforeChangeLookup, that.fullDocumentBeforeChangeLookup)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.collation, that.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(this.resumeTimestamp, that.resumeTimestamp)) { + return false; + } + return resume == that.resume; + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(filter); + result = 31 * result + ObjectUtils.nullSafeHashCode(resumeToken); + result = 31 * result + ObjectUtils.nullSafeHashCode(fullDocumentLookup); + result = 31 * result + 
ObjectUtils.nullSafeHashCode(fullDocumentBeforeChangeLookup); + result = 31 * result + ObjectUtils.nullSafeHashCode(collation); + result = 31 * result + ObjectUtils.nullSafeHashCode(resumeTimestamp); + result = 31 * result + ObjectUtils.nullSafeHashCode(resume); + return result; + } + + /** + * @author Christoph Strobl + * @since 2.2 + */ + enum Resume { + + UNDEFINED, + + /** + * @see com.mongodb.client.ChangeStreamIterable#startAfter(BsonDocument) + */ + START_AFTER, + + /** + * @see com.mongodb.client.ChangeStreamIterable#resumeAfter(BsonDocument) + */ + RESUME_AFTER + } + /** * Builder for creating {@link ChangeStreamOptions}. * @@ -114,8 +236,10 @@ public static class ChangeStreamOptionsBuilder { private @Nullable Object filter; private @Nullable BsonValue resumeToken; private @Nullable FullDocument fullDocumentLookup; + private @Nullable FullDocumentBeforeChange fullDocumentBeforeChangeLookup; private @Nullable Collation collation; - private @Nullable Instant resumeTimestamp; + private @Nullable Object resumeTimestamp; + private Resume resume = Resume.UNDEFINED; private ChangeStreamOptionsBuilder() {} @@ -125,23 +249,22 @@ private ChangeStreamOptionsBuilder() {} * @param collation must not be {@literal null} nor {@literal empty}. * @return this. */ + @Contract("_ -> this") public ChangeStreamOptionsBuilder collation(Collation collation) { - Assert.notNull(collation, "Collation must not be null nor empty!"); + Assert.notNull(collation, "Collation must not be null nor empty"); this.collation = collation; return this; } /** - * Set the filter to apply. - *

+ * Set the filter to apply.
* Fields on aggregation expression root level are prefixed to map to fields contained in * {@link ChangeStreamDocument#getFullDocument() fullDocument}. However {@literal operationType}, {@literal ns}, * {@literal documentKey} and {@literal fullDocument} are reserved words that will be omitted, and therefore taken * as given, during the mapping procedure. You may want to have a look at the - * structure of Change Events. - *

+ * structure of Change Events.
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to ensure filter expressions are * mapped to domain type fields. * @@ -149,9 +272,10 @@ public ChangeStreamOptionsBuilder collation(Collation collation) { * {@literal null}. * @return this. */ + @Contract("_ -> this") public ChangeStreamOptionsBuilder filter(Aggregation filter) { - Assert.notNull(filter, "Filter must not be null!"); + Assert.notNull(filter, "Filter must not be null"); this.filter = filter; return this; @@ -163,6 +287,7 @@ public ChangeStreamOptionsBuilder filter(Aggregation filter) { * @param filter must not be {@literal null} nor contain {@literal null} values. * @return this. */ + @Contract("_ -> this") public ChangeStreamOptionsBuilder filter(Document... filter) { Assert.noNullElements(filter, "Filter must not contain null values"); @@ -178,11 +303,17 @@ public ChangeStreamOptionsBuilder filter(Document... filter) { * @param resumeToken must not be {@literal null}. * @return this. */ + @Contract("_ -> this") public ChangeStreamOptionsBuilder resumeToken(BsonValue resumeToken) { - Assert.notNull(resumeToken, "ResumeToken must not be null!"); + Assert.notNull(resumeToken, "ResumeToken must not be null"); this.resumeToken = resumeToken; + + if (this.resume == Resume.UNDEFINED) { + this.resume = Resume.RESUME_AFTER; + } + return this; } @@ -202,40 +333,120 @@ public ChangeStreamOptionsBuilder returnFullDocumentOnUpdate() { * @param lookup must not be {@literal null}. * @return this. */ + @Contract("_ -> this") public ChangeStreamOptionsBuilder fullDocumentLookup(FullDocument lookup) { - Assert.notNull(lookup, "Lookup must not be null!"); + Assert.notNull(lookup, "Lookup must not be null"); this.fullDocumentLookup = lookup; return this; } + /** + * Set the {@link FullDocumentBeforeChange} lookup to use. + * + * @param lookup must not be {@literal null}. + * @return this. 
+ * @since 4.0 + */ + @Contract("_ -> this") + public ChangeStreamOptionsBuilder fullDocumentBeforeChangeLookup(FullDocumentBeforeChange lookup) { + + Assert.notNull(lookup, "Lookup must not be null"); + + this.fullDocumentBeforeChangeLookup = lookup; + return this; + } + + /** + * Return the full document before being changed if it is available. + * + * @return this. + * @since 4.0 + * @see #fullDocumentBeforeChangeLookup(FullDocumentBeforeChange) + */ + public ChangeStreamOptionsBuilder returnFullDocumentBeforeChange() { + return fullDocumentBeforeChangeLookup(FullDocumentBeforeChange.WHEN_AVAILABLE); + } + /** * Set the cluster time to resume from. * * @param resumeTimestamp must not be {@literal null}. * @return this. */ + @Contract("_ -> this") public ChangeStreamOptionsBuilder resumeAt(Instant resumeTimestamp) { - Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null!"); + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); + + this.resumeTimestamp = resumeTimestamp; + return this; + } + + /** + * Set the cluster time to resume from. + * + * @param resumeTimestamp must not be {@literal null}. + * @return this. + * @since 2.2 + */ + @Contract("_ -> this") + public ChangeStreamOptionsBuilder resumeAt(BsonTimestamp resumeTimestamp) { + + Assert.notNull(resumeTimestamp, "ResumeTimestamp must not be null"); this.resumeTimestamp = resumeTimestamp; return this; } + /** + * Set the resume token after which to continue emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. + * @since 2.2 + */ + @Contract("_ -> this") + public ChangeStreamOptionsBuilder resumeAfter(BsonValue resumeToken) { + + resumeToken(resumeToken); + this.resume = Resume.RESUME_AFTER; + + return this; + } + + /** + * Set the resume token after which to start emitting notifications. + * + * @param resumeToken must not be {@literal null}. + * @return this. 
+ * @since 2.2 + */ + @Contract("_ -> this") + public ChangeStreamOptionsBuilder startAfter(BsonValue resumeToken) { + + resumeToken(resumeToken); + this.resume = Resume.START_AFTER; + + return this; + } + /** * @return the built {@link ChangeStreamOptions} */ + @Contract("-> new") public ChangeStreamOptions build() { ChangeStreamOptions options = new ChangeStreamOptions(); - options.filter = filter; - options.resumeToken = resumeToken; - options.fullDocumentLookup = fullDocumentLookup; - options.collation = collation; - options.resumeTimestamp = resumeTimestamp; + options.filter = this.filter; + options.resumeToken = this.resumeToken; + options.fullDocumentLookup = this.fullDocumentLookup; + options.fullDocumentBeforeChangeLookup = this.fullDocumentBeforeChangeLookup; + options.collation = this.collation; + options.resumeTimestamp = this.resumeTimestamp; + options.resume = this.resume; return options; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java index c7ad700cba..bf8be5ba69 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.data.mongodb.core; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.dao.DataAccessException; -import org.springframework.lang.Nullable; import com.mongodb.MongoException; import com.mongodb.client.MongoCollection; @@ -29,7 +29,7 @@ * @author Grame Rocher * @author Oliver Gierke * @author John Brisbin - * @auhtor Christoph Strobl + * @author Christoph Strobl * @since 1.0 */ public interface CollectionCallback { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java index 7abfbb86e8..f4d1891703 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,37 @@ */ package org.springframework.data.mongodb.core; -import lombok.RequiredArgsConstructor; - +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; import java.util.Optional; +import java.util.function.Function; +import java.util.stream.StreamSupport; +import org.bson.BsonBinary; +import org.bson.BsonBinarySubType; +import org.bson.BsonNull; +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.mapping.Field; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.timeseries.GranularityDefinition; import org.springframework.data.mongodb.core.validation.Validator; import org.springframework.data.util.Optionals; -import org.springframework.lang.Nullable; +import org.springframework.lang.CheckReturnValue; +import org.springframework.lang.Contract; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import com.mongodb.client.model.ValidationAction; import com.mongodb.client.model.ValidationLevel; @@ -36,6 
+57,8 @@ * @author Christoph Strobl * @author Mark Paluch * @author Andreas Zink + * @author Ben Foster + * @author Ross Lawley */ public class CollectionOptions { @@ -44,29 +67,23 @@ public class CollectionOptions { private @Nullable Boolean capped; private @Nullable Collation collation; private ValidationOptions validationOptions; - - /** - * Constructs a new CollectionOptions instance. - * - * @param size the collection size in bytes, this data space is preallocated. Can be {@literal null}. - * @param maxDocuments the maximum number of documents in the collection. Can be {@literal null}. - * @param capped true to created a "capped" collection (fixed size with auto-FIFO behavior based on insertion order), - * false otherwise. Can be {@literal null}. - * @deprecated since 2.0 please use {@link CollectionOptions#empty()} as entry point. - */ - @Deprecated - public CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped) { - this(size, maxDocuments, capped, null, ValidationOptions.none()); - } + private @Nullable TimeSeriesOptions timeSeriesOptions; + private @Nullable CollectionChangeStreamOptions changeStreamOptions; + private @Nullable EncryptedFieldsOptions encryptedFieldsOptions; private CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nullable Boolean capped, - @Nullable Collation collation, ValidationOptions validationOptions) { + @Nullable Collation collation, ValidationOptions validationOptions, @Nullable TimeSeriesOptions timeSeriesOptions, + @Nullable CollectionChangeStreamOptions changeStreamOptions, + @Nullable EncryptedFieldsOptions encryptedFieldsOptions) { this.maxDocuments = maxDocuments; this.size = size; this.capped = capped; this.collation = collation; this.validationOptions = validationOptions; + this.timeSeriesOptions = timeSeriesOptions; + this.changeStreamOptions = changeStreamOptions; + this.encryptedFieldsOptions = encryptedFieldsOptions; } /** @@ -78,9 +95,9 @@ private 
CollectionOptions(@Nullable Long size, @Nullable Long maxDocuments, @Nul */ public static CollectionOptions just(Collation collation) { - Assert.notNull(collation, "Collation must not be null!"); + Assert.notNull(collation, "Collation must not be null"); - return new CollectionOptions(null, null, null, collation, ValidationOptions.none()); + return new CollectionOptions(null, null, null, collation, ValidationOptions.none(), null, null, null); } /** @@ -90,18 +107,97 @@ public static CollectionOptions just(Collation collation) { * @since 2.0 */ public static CollectionOptions empty() { - return new CollectionOptions(null, null, null, null, ValidationOptions.none()); + return new CollectionOptions(null, null, null, null, ValidationOptions.none(), null, null, null); + } + + /** + * Quick way to set up {@link CollectionOptions} for a Time Series collection. For more advanced settings use + * {@link #timeSeries(String, Function)}. + * + * @param timeField The name of the property which contains the date in each time series document. Must not be + * {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @see #timeSeries(TimeSeriesOptions) + * @since 3.3 + */ + public static CollectionOptions timeSeries(String timeField) { + return timeSeries(timeField, it -> it); + } + + /** + * Set up {@link CollectionOptions} for a Time Series collection. + * + * @param timeField the name of the field that contains the date in each time series document. + * @param options a function to apply additional settings to {@link TimeSeriesOptions}. + * @return new instance of {@link CollectionOptions}. + * @since 4.4 + */ + public static CollectionOptions timeSeries(String timeField, Function options) { + return empty().timeSeries(options.apply(TimeSeriesOptions.timeSeries(timeField))); + } + + /** + * Quick way to set up {@link CollectionOptions} for emitting (pre & post) change events. + * + * @return new instance of {@link CollectionOptions}. 
+ * @see #changeStream(CollectionChangeStreamOptions) + * @see CollectionChangeStreamOptions#preAndPostImages(boolean) + * @since 4.0 + */ + public static CollectionOptions emitChangedRevisions() { + return empty().changeStream(CollectionChangeStreamOptions.preAndPostImages(true)); + } + + /** + * Create new {@link CollectionOptions} with the given {@code encryptedFields}. + * + * @param encryptedFieldsOptions can be null + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(@Nullable EncryptedFieldsOptions encryptedFieldsOptions) { + return new CollectionOptions(null, null, null, null, ValidationOptions.NONE, null, null, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} reading encryption options from the given {@link MongoJsonSchema}. + * + * @param schema must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection(MongoJsonSchema schema) { + return encryptedCollection(EncryptedFieldsOptions.fromSchema(schema)); + } + + /** + * Create new {@link CollectionOptions} building encryption options in a fluent style. + * + * @param optionsFunction must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.5.0 + */ + @Contract("_ -> new") + @CheckReturnValue + public static CollectionOptions encryptedCollection( + Function optionsFunction) { + return encryptedCollection(optionsFunction.apply(new EncryptedFieldsOptions())); } /** * Create new {@link CollectionOptions} with already given settings and capped set to {@literal true}.
- * NOTE Using capped collections requires defining {@link #size(int)}. + * NOTE: Using capped collections requires defining {@link #size(long)}. * * @return new {@link CollectionOptions}. * @since 2.0 */ public CollectionOptions capped() { - return new CollectionOptions(size, maxDocuments, true, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, true, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -112,7 +208,8 @@ public CollectionOptions capped() { * @since 2.0 */ public CollectionOptions maxDocuments(long maxDocuments) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -123,7 +220,8 @@ public CollectionOptions maxDocuments(long maxDocuments) { * @since 2.0 */ public CollectionOptions size(long size) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** @@ -134,19 +232,20 @@ public CollectionOptions size(long size) { * @since 2.0 */ public CollectionOptions collation(@Nullable Collation collation) { - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); } /** * Create new {@link CollectionOptions} with already given settings and {@code validationOptions} set to given * {@link MongoJsonSchema}. * - * @param schema can be {@literal null}. + * @param schema must not be {@literal null}. * @return new {@link CollectionOptions}. 
* @since 2.1 */ - public CollectionOptions schema(@Nullable MongoJsonSchema schema) { - return validator(Validator.schema(schema)); + public CollectionOptions schema(MongoJsonSchema schema) { + return validator(schema != null ? Validator.schema(schema) : null); } /** @@ -226,7 +325,7 @@ public CollectionOptions failOnValidationError() { */ public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) { - Assert.notNull(validationLevel, "ValidationLevel must not be null!"); + Assert.notNull(validationLevel, "ValidationLevel must not be null"); return validation(validationOptions.validationLevel(validationLevel)); } @@ -240,7 +339,7 @@ public CollectionOptions schemaValidationLevel(ValidationLevel validationLevel) */ public CollectionOptions schemaValidationAction(ValidationAction validationAction) { - Assert.notNull(validationAction, "ValidationAction must not be null!"); + Assert.notNull(validationAction, "ValidationAction must not be null"); return validation(validationOptions.validationAction(validationAction)); } @@ -253,8 +352,52 @@ public CollectionOptions schemaValidationAction(ValidationAction validationActio */ public CollectionOptions validation(ValidationOptions validationOptions) { - Assert.notNull(validationOptions, "ValidationOptions must not be null!"); - return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions); + Assert.notNull(validationOptions, "ValidationOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link TimeSeriesOptions}. + * + * @param timeSeriesOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. 
+ * @since 3.3 + */ + public CollectionOptions timeSeries(TimeSeriesOptions timeSeriesOptions) { + + Assert.notNull(timeSeriesOptions, "TimeSeriesOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Create new {@link CollectionOptions} with the given {@link CollectionChangeStreamOptions}. + * + * @param changeStreamOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + * @since 4.0 + */ + public CollectionOptions changeStream(CollectionChangeStreamOptions changeStreamOptions) { + + Assert.notNull(changeStreamOptions, "ChangeStreamOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** + * Set the {@link EncryptedFieldsOptions} for collections using queryable encryption. + * + * @param encryptedFieldsOptions must not be {@literal null}. + * @return new instance of {@link CollectionOptions}. + */ + @Contract("_ -> new") + @CheckReturnValue + public CollectionOptions encrypted(EncryptedFieldsOptions encryptedFieldsOptions) { + + Assert.notNull(encryptedFieldsOptions, "EncryptedCollectionOptions must not be null"); + return new CollectionOptions(size, maxDocuments, capped, collation, validationOptions, timeSeriesOptions, + changeStreamOptions, encryptedFieldsOptions); + } + + /** @@ -305,6 +448,94 @@ public Optional getValidationOptions() { return validationOptions.isEmpty() ? Optional.empty() : Optional.of(validationOptions); } + /** + * Get the {@link TimeSeriesOptions} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 3.3 + */ + public Optional getTimeSeriesOptions() { + return Optional.ofNullable(timeSeriesOptions); + } + + /** + * Get the {@link CollectionChangeStreamOptions} if available. 
+ * + * @return {@link Optional#empty()} if not specified. + * @since 4.0 + */ + public Optional getChangeStreamOptions() { + return Optional.ofNullable(changeStreamOptions); + } + + /** + * Get the {@code encryptedFields} if available. + * + * @return {@link Optional#empty()} if not specified. + * @since 4.5 + */ + public Optional getEncryptedFieldsOptions() { + return Optional.ofNullable(encryptedFieldsOptions); + } + + @Override + public String toString() { + return "CollectionOptions{" + "maxDocuments=" + maxDocuments + ", size=" + size + ", capped=" + capped + + ", collation=" + collation + ", validationOptions=" + validationOptions + ", timeSeriesOptions=" + + timeSeriesOptions + ", changeStreamOptions=" + changeStreamOptions + ", encryptedCollectionOptions=" + + encryptedFieldsOptions + ", disableValidation=" + disableValidation() + ", strictValidation=" + + strictValidation() + ", moderateValidation=" + moderateValidation() + ", warnOnValidationError=" + + warnOnValidationError() + ", failOnValidationError=" + failOnValidationError() + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CollectionOptions that = (CollectionOptions) o; + + if (!ObjectUtils.nullSafeEquals(maxDocuments, that.maxDocuments)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(size, that.size)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(capped, that.capped)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(collation, that.collation)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(validationOptions, that.validationOptions)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(timeSeriesOptions, that.timeSeriesOptions)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(changeStreamOptions, that.changeStreamOptions)) { + return false; + } + return ObjectUtils.nullSafeEquals(encryptedFieldsOptions, 
that.encryptedFieldsOptions); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(maxDocuments); + result = 31 * result + ObjectUtils.nullSafeHashCode(size); + result = 31 * result + ObjectUtils.nullSafeHashCode(capped); + result = 31 * result + ObjectUtils.nullSafeHashCode(collation); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(timeSeriesOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(changeStreamOptions); + result = 31 * result + ObjectUtils.nullSafeHashCode(encryptedFieldsOptions); + return result; + } + /** * Encapsulation of ValidationOptions options. * @@ -312,7 +543,6 @@ public Optional getValidationOptions() { * @author Andreas Zink * @since 2.1 */ - @RequiredArgsConstructor public static class ValidationOptions { private static final ValidationOptions NONE = new ValidationOptions(null, null, null); @@ -321,6 +551,14 @@ public static class ValidationOptions { private final @Nullable ValidationLevel validationLevel; private final @Nullable ValidationAction validationAction; + public ValidationOptions(@Nullable Validator validator, @Nullable ValidationLevel validationLevel, + @Nullable ValidationAction validationAction) { + + this.validator = validator; + this.validationLevel = validationLevel; + this.validationAction = validationAction; + } + /** * Create an empty {@link ValidationOptions}. * @@ -336,6 +574,7 @@ public static ValidationOptions none() { * @param validator can be {@literal null}. * @return new instance of {@link ValidationOptions}. */ + @Contract("_ -> new") public ValidationOptions validator(@Nullable Validator validator) { return new ValidationOptions(validator, validationLevel, validationAction); } @@ -346,6 +585,7 @@ public ValidationOptions validator(@Nullable Validator validator) { * @param validationLevel can be {@literal null}. * @return new instance of {@link ValidationOptions}. 
*/ + @Contract("_ -> new") public ValidationOptions validationLevel(ValidationLevel validationLevel) { return new ValidationOptions(validator, validationLevel, validationAction); } @@ -356,6 +596,7 @@ public ValidationOptions validationLevel(ValidationLevel validationLevel) { * @param validationAction can be {@literal null}. * @return new instance of {@link ValidationOptions}. */ + @Contract("_ -> new") public ValidationOptions validationAction(ValidationAction validationAction) { return new ValidationOptions(validator, validationLevel, validationAction); } @@ -381,7 +622,7 @@ public Optional getValidationLevel() { /** * Get the {@code validationAction} to perform. * - * @return @return {@link Optional#empty()} if not set. + * @return {@link Optional#empty()} if not set. */ public Optional getValidationAction() { return Optional.ofNullable(validationAction); @@ -393,5 +634,421 @@ public Optional getValidationAction() { boolean isEmpty() { return !Optionals.isAnyPresent(getValidator(), getValidationAction(), getValidationLevel()); } + + @Override + public String toString() { + + return "ValidationOptions{" + "validator=" + validator + ", validationLevel=" + validationLevel + + ", validationAction=" + validationAction + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + ValidationOptions that = (ValidationOptions) o; + + if (!ObjectUtils.nullSafeEquals(validator, that.validator)) { + return false; + } + if (validationLevel != that.validationLevel) + return false; + return validationAction == that.validationAction; + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(validator); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationLevel); + result = 31 * result + ObjectUtils.nullSafeHashCode(validationAction); + return result; + } + } + + /** + * Encapsulation of Encryption options for 
collections. + * + * @author Christoph Strobl + * @since 4.5 + */ + public static class EncryptedFieldsOptions { + + private static final EncryptedFieldsOptions NONE = new EncryptedFieldsOptions(); + + private final @Nullable MongoJsonSchema schema; + private final List queryableProperties; + + EncryptedFieldsOptions() { + this(null, List.of()); + } + + private EncryptedFieldsOptions(@Nullable MongoJsonSchema schema, + List queryableProperties) { + + this.schema = schema; + this.queryableProperties = queryableProperties; + } + + /** + * @return {@link EncryptedFieldsOptions#NONE} + */ + public static EncryptedFieldsOptions none() { + return NONE; + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. + */ + public static EncryptedFieldsOptions fromSchema(MongoJsonSchema schema) { + return new EncryptedFieldsOptions(schema, List.of()); + } + + /** + * @return new instance of {@link EncryptedFieldsOptions}. + */ + public static EncryptedFieldsOptions fromProperties(List properties) { + return new EncryptedFieldsOptions(null, List.copyOf(properties)); + } + + /** + * Add a new {@link QueryableJsonSchemaProperty queryable property} for the given source property. + *

+ * Please note that, a given {@link JsonSchemaProperty} may override options from a given {@link MongoJsonSchema} if + * set. + * + * @param property the queryable source - typically + * {@link org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty + * encrypted}. + * @param characteristics the query options to set. + * @return new instance of {@link EncryptedFieldsOptions}. + */ + @Contract("_, _ -> new") + @CheckReturnValue + public EncryptedFieldsOptions queryable(JsonSchemaProperty property, QueryCharacteristic... characteristics) { + + List targetPropertyList = new ArrayList<>(queryableProperties.size() + 1); + targetPropertyList.addAll(queryableProperties); + targetPropertyList.add(JsonSchemaProperty.queryable(property, List.of(characteristics))); + + return new EncryptedFieldsOptions(schema, targetPropertyList); + } + + public Document toDocument() { + return new Document("fields", selectPaths()); + } + + private List selectPaths() { + + Map fields = new LinkedHashMap<>(); + for (Document field : fromSchema()) { + fields.put(field.get("path", String.class), field); + } + for (Document field : fromProperties()) { + fields.put(field.get("path", String.class), field); + } + return List.copyOf(fields.values()); + } + + private List fromProperties() { + + if (queryableProperties.isEmpty()) { + return List.of(); + } + + List converted = new ArrayList<>(queryableProperties.size()); + for (QueryableJsonSchemaProperty property : queryableProperties) { + + Document field = new Document("path", property.getIdentifier()); + + if (!property.getTypes().isEmpty()) { + field.append("bsonType", property.getTypes().iterator().next().toBsonType().value()); + } + + if (property + .getTargetProperty() instanceof IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty encrypted) { + if (encrypted.getKeyId() != null) { + if (encrypted.getKeyId() instanceof String stringKey) { + field.append("keyId", + new 
BsonBinary(BsonBinarySubType.UUID_STANDARD, stringKey.getBytes(StandardCharsets.UTF_8))); + } else { + field.append("keyId", encrypted.getKeyId()); + } + } + } + + field.append("queries", StreamSupport.stream(property.getCharacteristics().spliterator(), false) + .map(QueryCharacteristic::toDocument).toList()); + + if (!field.containsKey("keyId")) { + field.append("keyId", BsonNull.VALUE); + } + + converted.add(field); + } + return converted; + } + + private List fromSchema() { + + if (schema == null) { + return List.of(); + } + + Document root = schema.schemaDocument(); + Map paths = new LinkedHashMap<>(); + collectPaths(root, null, paths); + + List fields = new ArrayList<>(); + if (!paths.isEmpty()) { + + for (Entry entry : paths.entrySet()) { + Document field = new Document("path", entry.getKey()); + field.append("keyId", entry.getValue().getOrDefault("keyId", BsonNull.VALUE)); + if (entry.getValue().containsKey("bsonType")) { + field.append("bsonType", entry.getValue().get("bsonType")); + } + field.put("queries", entry.getValue().get("queries")); + fields.add(field); + } + } + + return fields; + } + } + + private static void collectPaths(Document document, @Nullable String currentPath, Map paths) { + + if (document.containsKey("type") && document.get("type").equals("object")) { + Object o = document.get("properties"); + if (o == null) { + return; + } + + if (o instanceof Document properties) { + for (Entry entry : properties.entrySet()) { + if (entry.getValue() instanceof Document nested) { + + String path = currentPath == null ? entry.getKey() : (currentPath + "." 
+ entry.getKey()); + if (nested.containsKey("encrypt")) { + Document target = new Document(nested.get("encrypt", Document.class)); + if (nested.containsKey("queries")) { + List queries = nested.get("queries", List.class); + if (!queries.isEmpty() && queries.iterator().next() instanceof Document qd) { + target.putAll(qd); + } + } + paths.put(path, target); + } else { + collectPaths(nested, path, paths); + } + } + } + } + } + } + + /** + * Encapsulation of options applied to define collections change stream behaviour. + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class CollectionChangeStreamOptions { + + private final boolean preAndPostImages; + + private CollectionChangeStreamOptions(boolean emitChangedRevisions) { + this.preAndPostImages = emitChangedRevisions; + } + + /** + * Output the version of a document before and after changes (the document pre- and post-images). + * + * @return new instance of {@link CollectionChangeStreamOptions}. + */ + public static CollectionChangeStreamOptions preAndPostImages(boolean emitChangedRevisions) { + return new CollectionChangeStreamOptions(true); + } + + public boolean getPreAndPostImages() { + return preAndPostImages; + } + + @Override + public String toString() { + return "CollectionChangeStreamOptions{" + "preAndPostImages=" + preAndPostImages + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + CollectionChangeStreamOptions that = (CollectionChangeStreamOptions) o; + + return preAndPostImages == that.preAndPostImages; + } + + @Override + public int hashCode() { + return (preAndPostImages ? 1 : 0); + } + } + + /** + * Options applicable to Time Series collections. 
+ * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/core/timeseries-collections + * @since 3.3 + */ + public static class TimeSeriesOptions { + + private final String timeField; + + private @Nullable final String metaField; + + private final GranularityDefinition granularity; + + private final Duration expireAfter; + + private TimeSeriesOptions(String timeField, @Nullable String metaField, GranularityDefinition granularity, + Duration expireAfter) { + Assert.hasText(timeField, "Time field must not be empty or null"); + + this.timeField = timeField; + this.metaField = metaField; + this.granularity = granularity; + this.expireAfter = expireAfter; + } + + /** + * Create a new instance of {@link TimeSeriesOptions} using the given field as its {@literal timeField}. The one, + * that contains the date in each time series document.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param timeField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + public static TimeSeriesOptions timeSeries(String timeField) { + return new TimeSeriesOptions(timeField, null, Granularity.DEFAULT, Duration.ofSeconds(-1)); + } + + /** + * Set the name of the field which contains metadata in each time series document. Should not be the {@literal id} + * nor {@link TimeSeriesOptions#timeSeries(String)} timeField} nor point to an {@literal array} or + * {@link java.util.Collection}.
+ * {@link Field#name() Annotated fieldnames} will be considered during the mapping process. + * + * @param metaField must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + */ + @Contract("_ -> new") + public TimeSeriesOptions metaField(String metaField) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Select the {@link GranularityDefinition} parameter to define how data in the time series collection is organized. + * Select one that is closest to the time span between incoming measurements. + * + * @return new instance of {@link TimeSeriesOptions}. + * @see Granularity + */ + @Contract("_ -> new") + public TimeSeriesOptions granularity(GranularityDefinition granularity) { + return new TimeSeriesOptions(timeField, metaField, granularity, expireAfter); + } + + /** + * Set the {@link Duration} for automatic removal of documents older than a specified value. + * + * @param ttl must not be {@literal null}. + * @return new instance of {@link TimeSeriesOptions}. + * @see com.mongodb.client.model.CreateCollectionOptions#expireAfter(long, java.util.concurrent.TimeUnit) + * @since 4.4 + */ + @Contract("_ -> new") + public TimeSeriesOptions expireAfter(Duration ttl) { + return new TimeSeriesOptions(timeField, metaField, granularity, ttl); + } + + /** + * @return never {@literal null}. + */ + public String getTimeField() { + return timeField; + } + + /** + * @return can be {@literal null}. Might be an {@literal empty} {@link String} as well, so maybe check via + * {@link org.springframework.util.StringUtils#hasText(String)}. + */ + @Nullable + public String getMetaField() { + return metaField; + } + + /** + * @return never {@literal null}. + */ + public GranularityDefinition getGranularity() { + return granularity; + } + + /** + * Get the {@link Duration} for automatic removal of documents. + * + * @return a {@link Duration#isNegative() negative} value if not specified. 
+ * @since 4.4 + */ + public Duration getExpireAfter() { + return expireAfter; + } + + @Override + public String toString() { + + return "TimeSeriesOptions{" + "timeField='" + timeField + '\'' + ", metaField='" + metaField + '\'' + + ", granularity=" + granularity + '}'; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + + TimeSeriesOptions that = (TimeSeriesOptions) o; + + if (!ObjectUtils.nullSafeEquals(timeField, that.timeField)) { + return false; + } + if (!ObjectUtils.nullSafeEquals(metaField, that.metaField)) { + return false; + } + return ObjectUtils.nullSafeEquals(granularity, that.granularity); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(timeField); + result = 31 * result + ObjectUtils.nullSafeHashCode(metaField); + result = 31 * result + ObjectUtils.nullSafeHashCode(granularity); + return result; + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java new file mode 100644 index 0000000000..f3769355c7 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparer.java @@ -0,0 +1,61 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.util.Assert; + +import com.mongodb.client.MongoCollection; + +/** + * Interface for functional preparation of a {@link MongoCollection}. + * + * @author Mark Paluch + * @since 4.1 + */ +public interface CollectionPreparer { + + /** + * Returns a preparer that always returns its input collection. + * + * @return a preparer that always returns its input collection. + */ + static CollectionPreparer identity() { + return it -> it; + } + + /** + * Prepare the {@code collection}. + * + * @param collection the collection to prepare. + * @return the prepared collection. + */ + T prepare(T collection); + + /** + * Returns a composed {@code CollectionPreparer} that first applies this preparer to the collection, and then applies + * the {@code after} preparer to the result. If evaluation of either function throws an exception, it is relayed to + * the caller of the composed function. + * + * @param after the collection preparer to apply after this function is applied. + * @return a composed {@code CollectionPreparer} that first applies this preparer and then applies the {@code after} + * preparer. + */ + default CollectionPreparer andThen(CollectionPreparer after) { + Assert.notNull(after, "After CollectionPreparer must not be null"); + return c -> after.prepare(prepare(c)); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java new file mode 100644 index 0000000000..bdf0b90ee3 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CollectionPreparerSupport.java @@ -0,0 +1,183 @@ +/* + * Copyright 2023-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.List; +import java.util.function.BiFunction; +import java.util.function.Function; + +import org.bson.Document; +import org.jspecify.annotations.Nullable; + +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.client.MongoCollection; + +/** + * Support class for delegate implementations to apply {@link ReadConcern} and {@link ReadPreference} settings upon + * {@link CollectionPreparer preparing a collection}. 
+ * + * @author Mark Paluch + * @since 4.1 + */ +class CollectionPreparerSupport implements ReadConcernAware, ReadPreferenceAware { + + private final List sources; + + private CollectionPreparerSupport(List sources) { + this.sources = sources; + } + + T doPrepare(T collection, Function concernAccessor, BiFunction concernFunction, + Function preferenceAccessor, BiFunction preferenceFunction) { + + T collectionToUse = collection; + + for (Object source : sources) { + if (source instanceof ReadConcernAware rca && rca.hasReadConcern()) { + + ReadConcern concern = rca.getReadConcern(); + if (concernAccessor.apply(collectionToUse) != concern) { + collectionToUse = concernFunction.apply(collectionToUse, concern); + } + break; + } + } + + for (Object source : sources) { + if (source instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + + ReadPreference preference = rpa.getReadPreference(); + if (preferenceAccessor.apply(collectionToUse) != preference) { + collectionToUse = preferenceFunction.apply(collectionToUse, preference); + } + break; + } + } + + return collectionToUse; + } + + @Override + public boolean hasReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return true; + } + } + + return false; + } + + @Override + public @Nullable ReadConcern getReadConcern() { + + for (Object aware : sources) { + if (aware instanceof ReadConcernAware rca && rca.hasReadConcern()) { + return rca.getReadConcern(); + } + } + + return null; + } + + @Override + public boolean hasReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return true; + } + } + + return false; + } + + @Override + public @Nullable ReadPreference getReadPreference() { + + for (Object aware : sources) { + if (aware instanceof ReadPreferenceAware rpa && rpa.hasReadPreference()) { + return rpa.getReadPreference(); + } + } + + return null; + } + + static 
class CollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private CollectionPreparerDelegate(List sources) { + super(sources); + } + + public static CollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static CollectionPreparerDelegate of(Object... mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) { + return (CollectionPreparerDelegate) mixedAwares[0]; + } + + return new CollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public MongoCollection prepare(MongoCollection collection) { + return doPrepare(collection, MongoCollection::getReadConcern, MongoCollection::withReadConcern, + MongoCollection::getReadPreference, MongoCollection::withReadPreference); + } + + } + + static class ReactiveCollectionPreparerDelegate extends CollectionPreparerSupport + implements CollectionPreparer> { + + private ReactiveCollectionPreparerDelegate(List sources) { + super(sources); + } + + public static ReactiveCollectionPreparerDelegate of(ReadPreferenceAware... awares) { + return of((Object[]) awares); + } + + public static ReactiveCollectionPreparerDelegate of(Object... 
mixedAwares) { + + if (mixedAwares.length == 1 && mixedAwares[0] instanceof CollectionPreparerDelegate) { + return (ReactiveCollectionPreparerDelegate) mixedAwares[0]; + } + + return new ReactiveCollectionPreparerDelegate(Arrays.asList(mixedAwares)); + } + + @Override + public com.mongodb.reactivestreams.client.MongoCollection prepare( + com.mongodb.reactivestreams.client.MongoCollection collection) { + return doPrepare(collection, // + com.mongodb.reactivestreams.client.MongoCollection::getReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::withReadConcern, + com.mongodb.reactivestreams.client.MongoCollection::getReadPreference, + com.mongodb.reactivestreams.client.MongoCollection::withReadPreference); + } + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java new file mode 100644 index 0000000000..11d9f09afd --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CountQuery.java @@ -0,0 +1,261 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.geo.Point; +import org.springframework.data.mongodb.core.query.MetricConversion; +import org.springframework.util.ObjectUtils; + +/** + * Value object representing a count query. Count queries using {@code $near} or {@code $nearSphere} require a rewrite + * to {@code $geoWithin}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +class CountQuery { + + private final Document source; + + private CountQuery(Document source) { + this.source = source; + } + + public static CountQuery of(Document source) { + return new CountQuery(source); + } + + /** + * Returns the query {@link Document} that can be used with {@code countDocuments()}. Potentially rewrites the query + * to be usable with {@code countDocuments()}. + * + * @return the query {@link Document} that can be used with {@code countDocuments()}. + */ + public Document toQueryDocument() { + + if (!requiresRewrite(source)) { + return source; + } + + Document target = new Document(); + + for (Map.Entry entry : source.entrySet()) { + + if (entry.getValue() instanceof Document document && requiresRewrite(entry.getValue())) { + + target.putAll(createGeoWithin(entry.getKey(), document, source.get("$and"))); + continue; + } + + if (entry.getValue() instanceof Collection collection && requiresRewrite(entry.getValue())) { + + target.put(entry.getKey(), rewriteCollection(collection)); + continue; + } + + if ("$and".equals(entry.getKey()) && target.containsKey("$and")) { + // Expect $and to be processed with Document and createGeoWithin. 
+ continue; + } + + target.put(entry.getKey(), entry.getValue()); + } + + return target; + } + + /** + * @param valueToInspect + * @return {@code true} if the enclosing element needs to be rewritten. + */ + private boolean requiresRewrite(Object valueToInspect) { + + if (valueToInspect instanceof Document document) { + return requiresRewrite(document); + } + + if (valueToInspect instanceof Collection collection) { + return requiresRewrite(collection); + } + + return false; + } + + private boolean requiresRewrite(Collection collection) { + + for (Object o : collection) { + if (o instanceof Document document && requiresRewrite(document)) { + return true; + } + } + + return false; + } + + private boolean requiresRewrite(Document document) { + + if (containsNear(document)) { + return true; + } + + for (Object entry : document.values()) { + + if (requiresRewrite(entry)) { + return true; + } + } + + return false; + } + + private Collection rewriteCollection(Collection source) { + + Collection rewrittenCollection = new ArrayList<>(source.size()); + + for (Object item : source) { + if (item instanceof Document document && requiresRewrite(item)) { + rewrittenCollection.add(CountQuery.of(document).toQueryDocument()); + } else { + rewrittenCollection.add(item); + } + } + + return rewrittenCollection; + } + + /** + * Rewrite the near query for field {@code key} to {@code $geoWithin}. + * + * @param key the queried field. + * @param source source {@link Document}. + * @param $and potentially existing {@code $and} condition. + * @return the rewritten query {@link Document}. + */ + @SuppressWarnings({ "unchecked", "NullAway" }) + private static Document createGeoWithin(String key, Document source, @Nullable Object $and) { + + boolean spheric = source.containsKey("$nearSphere"); + Object $near = spheric ? 
source.get("$nearSphere") : source.get("$near"); + + Number maxDistance = getMaxDistance(source, $near, spheric); + + List $centerMax = Arrays.asList(toCenterCoordinates($near), maxDistance); + Document $geoWithinMax = new Document("$geoWithin", + new Document(spheric ? "$centerSphere" : "$center", $centerMax)); + + if (!containsNearWithMinDistance(source)) { + return new Document(key, $geoWithinMax); + } + + Number minDistance = (Number) source.get("$minDistance"); + List $centerMin = Arrays.asList(toCenterCoordinates($near), minDistance); + Document $geoWithinMin = new Document("$geoWithin", + new Document(spheric ? "$centerSphere" : "$center", $centerMin)); + + List criteria; + + if ($and != null) { + if ($and instanceof Collection) { + Collection andElements = (Collection) $and; + criteria = new ArrayList<>(andElements.size() + 2); + criteria.addAll(andElements); + } else { + throw new IllegalArgumentException( + "Cannot rewrite query as it contains an '$and' element that is not a Collection: Offending element: " + + $and); + } + } else { + criteria = new ArrayList<>(2); + } + + criteria.add(new Document("$nor", Collections.singletonList(new Document(key, $geoWithinMin)))); + criteria.add(new Document(key, $geoWithinMax)); + + return new Document("$and", criteria); + } + + private static Number getMaxDistance(Document source, Object $near, boolean spheric) { + + Number maxDistance = Double.MAX_VALUE; + + if (source.containsKey("$maxDistance")) { // legacy coordinate pair + return (Number) source.get("$maxDistance"); + } + + if ($near instanceof Document nearDoc) { + + if (nearDoc.containsKey("$maxDistance")) { + + maxDistance = (Number) nearDoc.get("$maxDistance"); + // geojson is in Meters but we need radians x/(6378.1*1000) + if (spheric && nearDoc.containsKey("$geometry")) { + maxDistance = MetricConversion.metersToRadians(maxDistance.doubleValue()); + } + } + } + + return maxDistance; + } + + private static boolean containsNear(Document source) { + return 
source.containsKey("$near") || source.containsKey("$nearSphere"); + } + + private static boolean containsNearWithMinDistance(Document source) { + + if (!containsNear(source)) { + return false; + } + + return source.containsKey("$minDistance"); + } + + @SuppressWarnings("NullAway") + private static Object toCenterCoordinates(Object value) { + + if (ObjectUtils.isArray(value)) { + return value; + } + + if (value instanceof Point point) { + return Arrays.asList(point.getX(), point.getY()); + } + + if (value instanceof Document document) { + + if (document.containsKey("x")) { + return Arrays.asList(document.get("x"), document.get("y")); + } + + if (document.containsKey("$geometry")) { + Document geoJsonPoint = document.get("$geometry", Document.class); + return geoJsonPoint.get("coordinates"); + } + } + + return value; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java index 0a6ea266c3..3b53cef8d0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/CursorPreparer.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2002-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,15 @@ */ package org.springframework.data.mongodb.core; +import java.util.function.Function; + import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; +import com.mongodb.ReadPreference; import com.mongodb.client.FindIterable; +import com.mongodb.client.MongoCollection; /** * Simple callback interface to allow customization of a {@link FindIterable}. @@ -25,12 +31,52 @@ * @author Oliver Gierke * @author Christoph Strobl */ -interface CursorPreparer { +public interface CursorPreparer extends ReadPreferenceAware { + + /** + * Default {@link CursorPreparer} just passing on the given {@link FindIterable}. + * + * @since 2.2 + */ + CursorPreparer NO_OP_PREPARER = (iterable -> iterable); /** * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor. * - * @param cursor + * @param iterable must not be {@literal null}. + * @return never {@literal null}. + */ + FindIterable prepare(FindIterable iterable); + + /** + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a + * {@link FindIterable} via the given {@link Function find} function. + * + * @param collection must not be {@literal null}. + * @param find must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. 
+ * @since 2.2 + */ + default FindIterable initiateFind(MongoCollection collection, + Function, FindIterable> find) { + + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); + + if (hasReadPreference()) { + collection = collection.withReadPreference(getReadPreference()); + } + + return prepare(find.apply(collection)); + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none defined. + * @since 2.2 */ - FindIterable prepare(FindIterable cursor); + @Override + default @Nullable ReadPreference getReadPreference() { + return null; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java index 0ce27d2737..f450bddb30 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DbCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.core; +import org.jspecify.annotations.Nullable; import org.springframework.dao.DataAccessException; -import org.springframework.lang.Nullable; import com.mongodb.MongoException; import com.mongodb.client.MongoDatabase; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java index 7bce03edbe..8bc5349e61 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultBulkOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,37 +15,45 @@ */ package org.springframework.data.mongodb.core; -import lombok.NonNull; -import lombok.Value; - import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import org.bson.Document; -import org.bson.conversions.Bson; -import org.springframework.dao.DataAccessException; -import org.springframework.dao.support.PersistenceExceptionTranslator; +import org.jspecify.annotations.Nullable; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.dao.DataIntegrityViolationException; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.EntityCallbacks; +import org.springframework.data.mongodb.BulkOperationException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.convert.UpdateMapper; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.AfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.BeforeSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import 
org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.Pair; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; -import com.mongodb.BulkWriteException; +import com.mongodb.MongoBulkWriteException; import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; import com.mongodb.client.MongoCollection; import com.mongodb.client.model.BulkWriteOptions; import com.mongodb.client.model.DeleteManyModel; -import com.mongodb.client.model.DeleteOneModel; import com.mongodb.client.model.DeleteOptions; import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; import com.mongodb.client.model.UpdateManyModel; import com.mongodb.client.model.UpdateOneModel; import com.mongodb.client.model.UpdateOptions; @@ -58,18 +66,21 @@ * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Minsu Kim + * @author Jens Schauder + * @author Michail Nikolaev + * @author Roman Puchkovskiy + * @author Jacob Botuck * @since 1.9 */ -class DefaultBulkOperations implements BulkOperations { +class DefaultBulkOperations extends BulkOperationsSupport implements BulkOperations { private final MongoOperations mongoOperations; private final String collectionName; private final BulkOperationContext bulkOperationContext; - private final List> models = new ArrayList<>(); + private final List models = new ArrayList<>(); - private PersistenceExceptionTranslator exceptionTranslator; private @Nullable WriteConcern defaultWriteConcern; - private BulkWriteOptions bulkOptions; /** @@ -84,24 +95,15 @@ class DefaultBulkOperations implements BulkOperations { DefaultBulkOperations(MongoOperations mongoOperations, String collectionName, BulkOperationContext bulkOperationContext) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); - 
Assert.hasText(collectionName, "CollectionName must not be null nor empty!"); - Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null!"); + super(collectionName); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); this.mongoOperations = mongoOperations; this.collectionName = collectionName; this.bulkOperationContext = bulkOperationContext; - this.exceptionTranslator = new MongoExceptionTranslator(); - this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode()); - } - - /** - * Configures the {@link PersistenceExceptionTranslator} to be used. Defaults to {@link MongoExceptionTranslator}. - * - * @param exceptionTranslator can be {@literal null}. - */ - public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? 
new MongoExceptionTranslator() : exceptionTranslator; + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); } /** @@ -113,117 +115,86 @@ void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { this.defaultWriteConcern = defaultWriteConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.lang.Object) - */ @Override + @Contract("_ -> this") public BulkOperations insert(Object document) { - Assert.notNull(document, "Document must not be null!"); - - if (document instanceof Document) { - - models.add(new InsertOneModel<>((Document) document)); - return this; - } - - Document sink = new Document(); - mongoOperations.getConverter().write(document, sink); + Assert.notNull(document, "Document must not be null"); - models.add(new InsertOneModel<>(sink)); + maybeEmitEvent(new BeforeConvertEvent<>(document, collectionName)); + Object source = maybeInvokeBeforeConvertCallback(document); + addModel(source, new InsertOneModel<>(getMappedObject(source))); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#insert(java.util.List) - */ @Override + @Contract("_ -> this") public BulkOperations insert(List documents) { - Assert.notNull(documents, "Documents must not be null!"); + Assert.notNull(documents, "Documents must not be null"); documents.forEach(this::insert); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateOne(Query query, Update update) { + @Contract("_, _ -> this") + public BulkOperations updateOne(Query query, UpdateDefinition update) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + 
Assert.notNull(update, "Update must not be null"); - return updateOne(Collections.singletonList(Pair.of(query, update))); + return update(query, update, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateOne(java.util.List) - */ @Override - public BulkOperations updateOne(List> updates) { + @Contract("_ -> this") + public BulkOperations updateOne(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, false); } return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - @SuppressWarnings("unchecked") - public BulkOperations updateMulti(Query query, Update update) { + @Contract("_, _ -> this") + public BulkOperations updateMulti(Query query, UpdateDefinition update) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return updateMulti(Collections.singletonList(Pair.of(query, update))); + update(query, update, false, true); + + return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#updateMulti(java.util.List) - */ @Override - public BulkOperations updateMulti(List> updates) { + @Contract("_ -> this") + public BulkOperations updateMulti(List> updates) { - Assert.notNull(updates, "Updates must not be null!"); + Assert.notNull(updates, "Updates must not be null"); - for (Pair update : updates) { + for (Pair update : updates) { update(update.getFirst(), update.getSecond(), false, true); } return this; } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.BulkOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update) - */ @Override - public BulkOperations upsert(Query query, Update update) { + @Contract("_, _ -> this") + public BulkOperations upsert(Query query, UpdateDefinition update) { return update(query, update, true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#upsert(java.util.List) - */ @Override + @Contract("_ -> this") public BulkOperations upsert(List> updates) { for (Pair update : updates) { @@ -233,31 +204,25 @@ public BulkOperations upsert(List> updates) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#remove(org.springframework.data.mongodb.core.query.Query) - */ @Override + @Contract("_ -> this") public BulkOperations remove(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); DeleteOptions deleteOptions = new DeleteOptions(); query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); - models.add(new DeleteManyModel<>(query.getQueryObject(), deleteOptions)); + addModel(query, new DeleteManyModel<>(query.getQueryObject(), deleteOptions)); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#remove(java.util.List) - */ @Override + @Contract("_ -> this") public BulkOperations remove(List removes) { - Assert.notNull(removes, "Removals must not be null!"); + Assert.notNull(removes, "Removals must not be null"); for (Query query : removes) { remove(query); @@ -266,23 +231,90 @@ public BulkOperations remove(List removes) { return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.BulkOperations#executeBulk() - */ + @Override + @Contract("_, _, _ -> this") + public BulkOperations replaceOne(Query query, Object replacement, 
FindAndReplaceOptions options) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); + + ReplaceOptions replaceOptions = new ReplaceOptions(); + replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + replaceOptions.sort(query.getSortObject()); + } + query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); + + maybeEmitEvent(new BeforeConvertEvent<>(replacement, collectionName)); + Object source = maybeInvokeBeforeConvertCallback(replacement); + addModel(source, new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(source), replaceOptions)); + + return this; + } + @Override public com.mongodb.bulk.BulkWriteResult execute() { try { - - return mongoOperations.execute(collectionName, collection -> { - return collection.bulkWrite(models.stream().map(this::mapWriteModel).collect(Collectors.toList()), bulkOptions); - }); + + com.mongodb.bulk.BulkWriteResult result = mongoOperations.execute(collectionName, this::bulkWriteTo); + + Assert.state(result != null, "Result must not be null"); + + models.forEach(this::maybeEmitAfterSaveEvent); + models.forEach(this::maybeInvokeAfterSaveCallback); + + return result; } finally { - this.bulkOptions = getBulkWriteOptions(bulkOperationContext.getBulkMode()); + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); } } + private BulkWriteResult bulkWriteTo(MongoCollection collection) { + + if (defaultWriteConcern != null) { + collection = collection.withWriteConcern(defaultWriteConcern); + } + + try { + + return collection.bulkWrite( // + models.stream() // + .map(this::extractAndMapWriteModel) // + .collect(Collectors.toList()), // + bulkOptions); + } catch (RuntimeException ex) { + + if (ex instanceof MongoBulkWriteException mongoBulkWriteException) { + + if (mongoBulkWriteException.getWriteConcernError() != null) { + throw new 
DataIntegrityViolationException(ex.getMessage(), ex); + } + throw new BulkOperationException(ex.getMessage(), mongoBulkWriteException); + } + + throw ex; + } + } + + private WriteModel extractAndMapWriteModel(SourceAwareWriteModelHolder it) { + + maybeEmitBeforeSaveEvent(it); + + if (it.model() instanceof InsertOneModel model) { + + Document target = model.getDocument(); + maybeInvokeBeforeSaveCallback(it.source(), target); + } else if (it.model() instanceof ReplaceOneModel model) { + + Document target = model.getReplacement(); + maybeInvokeBeforeSaveCallback(it.source(), target); + } + + return mapWriteModel(it.source(), it.model()); + } + /** * Performs update and upsert bulk operations. * @@ -292,95 +324,136 @@ public com.mongodb.bulk.BulkWriteResult execute() { * @param multi whether to issue a multi-update. * @return the {@link BulkOperations} with the update registered. */ - private BulkOperations update(Query query, Update update, boolean upsert, boolean multi) { + private BulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - UpdateOptions options = new UpdateOptions(); - options.upsert(upsert); - query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); if (multi) { - models.add(new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options)); + addModel(update, new UpdateManyModel<>(query.getQueryObject(), update.getUpdateObject(), options)); } else { - models.add(new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options)); + addModel(update, new UpdateOneModel<>(query.getQueryObject(), update.getUpdateObject(), options)); } return this; } - private WriteModel 
mapWriteModel(WriteModel writeModel) { - - if (writeModel instanceof UpdateOneModel) { + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } - UpdateOneModel model = (UpdateOneModel) writeModel; + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); + } - return new UpdateOneModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), - model.getOptions()); - } + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); + } - if (writeModel instanceof UpdateManyModel) { + @Override + protected Optional> entity() { + return bulkOperationContext.entity(); + } - UpdateManyModel model = (UpdateManyModel) writeModel; + private Document getMappedObject(Object source) { - return new UpdateManyModel<>(getMappedQuery(model.getFilter()), getMappedUpdate(model.getUpdate()), - model.getOptions()); + if (source instanceof Document document) { + return document; } - if (writeModel instanceof DeleteOneModel) { + Document sink = new Document(); - DeleteOneModel model = (DeleteOneModel) writeModel; + mongoOperations.getConverter().write(source, sink); + return sink; + } - return new DeleteOneModel<>(getMappedQuery(model.getFilter()), model.getOptions()); - } + private void addModel(Object source, WriteModel model) { + models.add(new SourceAwareWriteModelHolder(source, model)); + } - if (writeModel instanceof DeleteManyModel) { + private void maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { - DeleteManyModel model = (DeleteManyModel) writeModel; + if (holder.model() instanceof InsertOneModel model) { - return new DeleteManyModel<>(getMappedQuery(model.getFilter()), model.getOptions()); - } + Document target = model.getDocument(); + maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel model) { - return writeModel; + Document target = 
model.getReplacement(); + maybeInvokeAfterSaveCallback(holder.source(), target); + } } - private Bson getMappedUpdate(Bson update) { - return bulkOperationContext.getUpdateMapper().getMappedObject(update, bulkOperationContext.getEntity()); + private void publishEvent(MongoMappingEvent event) { + bulkOperationContext.publishEvent(event); } - private Bson getMappedQuery(Bson query) { - return bulkOperationContext.getQueryMapper().getMappedObject(query, bulkOperationContext.getEntity()); + private Object maybeInvokeBeforeConvertCallback(Object value) { + return bulkOperationContext.callback(BeforeConvertCallback.class, value, collectionName); } - private static BulkWriteOptions getBulkWriteOptions(BulkMode bulkMode) { - - BulkWriteOptions options = new BulkWriteOptions(); - - switch (bulkMode) { - case ORDERED: - return options.ordered(true); - case UNORDERED: - return options.ordered(false); - } + private Object maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(BeforeSaveCallback.class, value, mappedDocument, collectionName); + } - throw new IllegalStateException("BulkMode was null!"); + private Object maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(AfterSaveCallback.class, value, mappedDocument, collectionName); } /** - * {@link BulkOperationContext} holds information about - * {@link org.springframework.data.mongodb.core.BulkOperations.BulkMode} the entity in use as well as references to + * {@link BulkOperationContext} holds information about {@link BulkMode} the entity in use as well as references to * {@link QueryMapper} and {@link UpdateMapper}. 
* * @author Christoph Strobl * @since 2.0 */ - @Value - static class BulkOperationContext { + record BulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable EntityCallbacks entityCallbacks) { + + public boolean skipEntityCallbacks() { + return entityCallbacks == null; + } + + public boolean skipEventPublishing() { + return eventPublisher == null; + } + + @SuppressWarnings({ "rawtypes", "NullAway" }) + public T callback(Class callbackType, T entity, String collectionName) { - @NonNull BulkMode bulkMode; - @NonNull Optional> entity; - @NonNull QueryMapper queryMapper; - @NonNull UpdateMapper updateMapper; + if (skipEntityCallbacks()) { + return entity; + } + + return entityCallbacks.callback(callbackType, entity, collectionName); + } + + @SuppressWarnings({ "rawtypes", "NullAway" }) + public T callback(Class callbackType, T entity, Document document, + String collectionName) { + + if (skipEntityCallbacks()) { + return entity; + } + + return entityCallbacks.callback(callbackType, entity, document, collectionName); + } + + @SuppressWarnings("NullAway") + public void publishEvent(ApplicationEvent event) { + + if (skipEventPublishing()) { + return; + } + + eventPublisher.publishEvent(event); + } } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java index 0574f57118..24d22bd80a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,15 +20,17 @@ import java.util.List; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.dao.DataAccessException; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; +import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import com.mongodb.MongoException; import com.mongodb.client.MongoCollection; @@ -52,7 +54,7 @@ public class DefaultIndexOperations implements IndexOperations { private final QueryMapper mapper; private final @Nullable Class type; - private MongoOperations mongoOperations; + private final MongoOperations mongoOperations; /** * Creates a new {@link DefaultIndexOperations}. @@ -64,7 +66,7 @@ public class DefaultIndexOperations implements IndexOperations { * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}. 
*/ @Deprecated - public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) { + public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper) { this(mongoDbFactory, collectionName, queryMapper, null); } @@ -80,12 +82,12 @@ public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionNa * {@link DefaultIndexOperations#DefaultIndexOperations(MongoOperations, String, Class)}. */ @Deprecated - public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionName, QueryMapper queryMapper, + public DefaultIndexOperations(MongoDatabaseFactory mongoDbFactory, String collectionName, QueryMapper queryMapper, @Nullable Class type) { - Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); - Assert.notNull(collectionName, "Collection name can not be null!"); - Assert.notNull(queryMapper, "QueryMapper must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); + Assert.notNull(collectionName, "Collection name can not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); this.collectionName = collectionName; this.mapper = queryMapper; @@ -103,8 +105,8 @@ public DefaultIndexOperations(MongoDbFactory mongoDbFactory, String collectionNa */ public DefaultIndexOperations(MongoOperations mongoOperations, String collectionName, @Nullable Class type) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); this.mongoOperations = mongoOperations; this.mapper = new QueryMapper(mongoOperations.getConverter()); @@ -112,32 +114,25 @@ public DefaultIndexOperations(MongoOperations mongoOperations, String collection this.type = type; } - /* - * (non-Javadoc) - 
* @see org.springframework.data.mongodb.core.index.IndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) - */ - public String ensureIndex(final IndexDefinition indexDefinition) { + @Override + @SuppressWarnings("NullAway") + public String ensureIndex(IndexDefinition indexDefinition) { return execute(collection -> { - Document indexOptions = indexDefinition.getIndexOptions(); - - IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); - - if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + MongoPersistentEntity entity = lookupPersistentEntity(type, collectionName); - Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); - ops.partialFilterExpression(mapper.getMappedObject((Document) indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), - lookupPersistentEntity(type, collectionName))); - } + indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity); + indexOptions = addDefaultCollationIfRequired(indexOptions, entity); - return collection.createIndex(indexDefinition.getIndexKeys(), ops); + Document mappedKeys = mapper.getMappedSort(indexDefinition.getIndexKeys(), entity); + return collection.createIndex(mappedKeys, indexOptions); }); } - @Nullable - private MongoPersistentEntity lookupPersistentEntity(@Nullable Class entityType, String collection) { + private @Nullable MongoPersistentEntity lookupPersistentEntity(@Nullable Class entityType, String collection) { if (entityType != null) { return mapper.getMappingContext().getRequiredPersistentEntity(entityType); @@ -154,11 +149,8 @@ private MongoPersistentEntity lookupPersistentEntity(@Nullable Class entit return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#dropIndex(java.lang.String) - */ - 
public void dropIndex(final String name) { + @Override + public void dropIndex(String name) { execute(collection -> { collection.dropIndex(name); @@ -167,18 +159,29 @@ public void dropIndex(final String name) { } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#dropAllIndexes() - */ + @Override + @SuppressWarnings("NullAway") + public void alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) { + + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); + + Document result = mongoOperations + .execute(db -> db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))); + + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' could not be modified. Response was %s".formatted(name, result.toJson()), null); + } + } + + @Override public void dropAllIndexes() { dropIndex("*"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperations#getIndexInfo() - */ + @Override + @SuppressWarnings("NullAway") public List getIndexInfo() { return execute(new CollectionCallback>() { @@ -192,7 +195,8 @@ public List doInCollection(MongoCollection collection) private List getIndexData(MongoCursor cursor) { - List indexInfoList = new ArrayList(); + int available = cursor.available(); + List indexInfoList = available > 0 ? 
new ArrayList<>(available) : new ArrayList<>(); while (cursor.hasNext()) { @@ -206,15 +210,33 @@ private List getIndexData(MongoCursor cursor) { }); } - @Nullable - public T execute(CollectionCallback callback) { + public @Nullable T execute(CollectionCallback callback) { + + Assert.notNull(callback, "CollectionCallback must not be null"); + + return mongoOperations.execute(collectionName, callback); + } - Assert.notNull(callback, "CollectionCallback must not be null!"); + private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions, + @Nullable MongoPersistentEntity entity) { - if (type != null) { - return mongoOperations.execute(type, callback); + if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + return ops; } - return mongoOperations.execute(collectionName, callback); + Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + return ops.partialFilterExpression( + mapper.getMappedSort((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); + } + + @SuppressWarnings("NullAway") + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + @Nullable MongoPersistentEntity entity) { + + if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { + return ops; + } + + return ops.collation(entity.getCollation().toMongoCollation()); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java index d20404af85..a34c1fb945 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultIndexOperationsProvider.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,14 @@ */ package org.springframework.data.mongodb.core; -import org.springframework.data.mongodb.MongoDbFactory; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.index.IndexOperationsProvider; /** - * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDbFactory}. + * {@link IndexOperationsProvider} to obtain {@link IndexOperations} from a given {@link MongoDatabaseFactory}. * * @author Mark Paluch * @author Christoph Strobl @@ -29,25 +30,21 @@ */ class DefaultIndexOperationsProvider implements IndexOperationsProvider { - private final MongoDbFactory mongoDbFactory; + private final MongoDatabaseFactory mongoDbFactory; private final QueryMapper mapper; /** * @param mongoDbFactory must not be {@literal null}. * @param mapper must not be {@literal null}. 
*/ - DefaultIndexOperationsProvider(MongoDbFactory mongoDbFactory, QueryMapper mapper) { + DefaultIndexOperationsProvider(MongoDatabaseFactory mongoDbFactory, QueryMapper mapper) { this.mongoDbFactory = mongoDbFactory; this.mapper = mapper; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.IndexOperationsProvider#reactiveIndexOps(java.lang.String) - */ @Override - public IndexOperations indexOps(String collectionName) { - return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper); + public IndexOperations indexOps(String collectionName, @Nullable Class type) { + return new DefaultIndexOperations(mongoDbFactory, collectionName, mapper, type); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java new file mode 100644 index 0000000000..92c6a957dc --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveBulkOperations.java @@ -0,0 +1,403 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.springframework.lang.Contract; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.context.ApplicationEvent; +import org.springframework.context.ApplicationEventPublisher; +import org.springframework.data.mapping.callback.EntityCallback; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; +import org.springframework.data.mongodb.core.mapping.event.ReactiveAfterSaveCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeConvertCallback; +import org.springframework.data.mongodb.core.mapping.event.ReactiveBeforeSaveCallback; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.util.Assert; + +import com.mongodb.WriteConcern; +import com.mongodb.bulk.BulkWriteResult; +import com.mongodb.client.model.BulkWriteOptions; +import com.mongodb.client.model.DeleteManyModel; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.InsertOneModel; +import com.mongodb.client.model.ReplaceOneModel; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateManyModel; +import 
com.mongodb.client.model.UpdateOneModel; +import com.mongodb.client.model.UpdateOptions; +import com.mongodb.reactivestreams.client.MongoCollection; + +/** + * Default implementation for {@link ReactiveBulkOperations}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 4.1 + */ +class DefaultReactiveBulkOperations extends BulkOperationsSupport implements ReactiveBulkOperations { + + private final ReactiveMongoOperations mongoOperations; + private final String collectionName; + private final ReactiveBulkOperationContext bulkOperationContext; + private final List> models = new ArrayList<>(); + + private @Nullable WriteConcern defaultWriteConcern; + + private BulkWriteOptions bulkOptions; + + /** + * Creates a new {@link DefaultReactiveBulkOperations} for the given {@link MongoOperations}, collection name and + * {@link ReactiveBulkOperationContext}. + * + * @param mongoOperations must not be {@literal null}. + * @param collectionName must not be {@literal null}. + * @param bulkOperationContext must not be {@literal null}. + */ + DefaultReactiveBulkOperations(ReactiveMongoOperations mongoOperations, String collectionName, + ReactiveBulkOperationContext bulkOperationContext) { + + super(collectionName); + + Assert.notNull(mongoOperations, "MongoOperations must not be null"); + Assert.hasText(collectionName, "CollectionName must not be null nor empty"); + Assert.notNull(bulkOperationContext, "BulkOperationContext must not be null"); + + this.mongoOperations = mongoOperations; + this.collectionName = collectionName; + this.bulkOperationContext = bulkOperationContext; + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + + /** + * Configures the default {@link WriteConcern} to be used. Defaults to {@literal null}. + * + * @param defaultWriteConcern can be {@literal null}. 
+ */ + void setDefaultWriteConcern(@Nullable WriteConcern defaultWriteConcern) { + this.defaultWriteConcern = defaultWriteConcern; + } + + @Override + @Contract("_ -> this") + public ReactiveBulkOperations insert(Object document) { + + Assert.notNull(document, "Document must not be null"); + + this.models.add(Mono.just(document).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, new InsertOneModel<>(getMappedObject(it))))); + + return this; + } + + @Override + @Contract("_ -> this") + public ReactiveBulkOperations insert(List documents) { + + Assert.notNull(documents, "Documents must not be null"); + + documents.forEach(this::insert); + + return this; + } + + @Override + @Contract("_, _, _ -> this") + public ReactiveBulkOperations updateOne(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, false); + return this; + } + + @Override + @Contract("_, _ -> this") + public ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + update(query, update, false, true); + return this; + } + + @Override + @Contract("_, _ -> this") + public ReactiveBulkOperations upsert(Query query, UpdateDefinition update) { + return update(query, update, true, true); + } + + @Override + @Contract("_ -> this") + public ReactiveBulkOperations remove(Query query) { + + Assert.notNull(query, "Query must not be null"); + + DeleteOptions deleteOptions = new DeleteOptions(); + query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); + + this.models.add(Mono.just(query) + .map(it -> new SourceAwareWriteModelHolder(it, new DeleteManyModel<>(it.getQueryObject(), deleteOptions)))); + + return 
this; + } + + @Override + @Contract("_ -> this") + public ReactiveBulkOperations remove(List removes) { + + Assert.notNull(removes, "Removals must not be null"); + + for (Query query : removes) { + remove(query); + } + + return this; + } + + @Override + @Contract("_, _, _ -> this") + public ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null"); + + ReplaceOptions replaceOptions = new ReplaceOptions(); + replaceOptions.upsert(options.isUpsert()); + if (query.isSorted()) { + replaceOptions.sort(query.getSortObject()); + } + query.getCollation().map(Collation::toMongoCollation).ifPresent(replaceOptions::collation); + + this.models.add(Mono.just(replacement).flatMap(it -> { + maybeEmitEvent(new BeforeConvertEvent<>(it, collectionName)); + return maybeInvokeBeforeConvertCallback(it); + }).map(it -> new SourceAwareWriteModelHolder(it, + new ReplaceOneModel<>(query.getQueryObject(), getMappedObject(it), replaceOptions)))); + + return this; + } + + @Override + public Mono execute() { + + try { + return mongoOperations.execute(collectionName, this::bulkWriteTo).next(); + } finally { + this.bulkOptions = getBulkWriteOptions(bulkOperationContext.bulkMode()); + } + } + + private Mono bulkWriteTo(MongoCollection collection) { + + if (defaultWriteConcern != null) { + collection = collection.withWriteConcern(defaultWriteConcern); + } + + Flux concat = Flux.concat(models).flatMapSequential(it -> { + + if (it.model() instanceof InsertOneModel iom) { + + Document target = iom.getDocument(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, iom))); + } else if (it.model() instanceof ReplaceOneModel rom) { + + Document target = 
rom.getReplacement(); + maybeEmitBeforeSaveEvent(it); + return maybeInvokeBeforeSaveCallback(it.source(), target) + .map(afterCallback -> new SourceAwareWriteModelHolder(afterCallback, mapWriteModel(afterCallback, rom))); + } + + return Mono.just(new SourceAwareWriteModelHolder(it.source(), mapWriteModel(it.source(), it.model()))); + }); + + MongoCollection theCollection = collection; + return concat.collectList().flatMap(it -> { + + return Mono + .from(theCollection + .bulkWrite(it.stream().map(SourceAwareWriteModelHolder::model).collect(Collectors.toList()), bulkOptions)) + .doOnSuccess(state -> { + it.forEach(this::maybeEmitAfterSaveEvent); + }).flatMap(state -> { + List> monos = it.stream().map(this::maybeInvokeAfterSaveCallback).collect(Collectors.toList()); + + return Flux.concat(monos).then(Mono.just(state)); + }); + }); + } + + /** + * Performs update and upsert bulk operations. + * + * @param query the {@link Query} to determine documents to update. + * @param update the {@link Update} to perform, must not be {@literal null}. + * @param upsert whether to upsert. + * @param multi whether to issue a multi-update. + * @return the {@link BulkOperations} with the update registered. 
+ */ + private ReactiveBulkOperations update(Query query, UpdateDefinition update, boolean upsert, boolean multi) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + + UpdateOptions options = computeUpdateOptions(query, update, upsert, multi); + + this.models.add(Mono.just(update).map(it -> { + if (multi) { + return new SourceAwareWriteModelHolder(update, + new UpdateManyModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + } + return new SourceAwareWriteModelHolder(update, + new UpdateOneModel<>(query.getQueryObject(), it.getUpdateObject(), options)); + })); + + return this; + } + + @Override + protected void maybeEmitEvent(ApplicationEvent event) { + bulkOperationContext.publishEvent(event); + } + + @Override + protected UpdateMapper updateMapper() { + return bulkOperationContext.updateMapper(); + } + + @Override + protected QueryMapper queryMapper() { + return bulkOperationContext.queryMapper(); + } + + @Override + protected Optional> entity() { + return bulkOperationContext.entity(); + } + + private Document getMappedObject(Object source) { + + if (source instanceof Document) { + return (Document) source; + } + + Document sink = new Document(); + + mongoOperations.getConverter().write(source, sink); + return sink; + } + + private Mono maybeInvokeAfterSaveCallback(SourceAwareWriteModelHolder holder) { + + if (holder.model() instanceof InsertOneModel) { + + Document target = ((InsertOneModel) holder.model()).getDocument(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } else if (holder.model() instanceof ReplaceOneModel) { + + Document target = ((ReplaceOneModel) holder.model()).getReplacement(); + return maybeInvokeAfterSaveCallback(holder.source(), target); + } + return Mono.just(holder.source()); + } + + private Mono maybeInvokeBeforeConvertCallback(Object value) { + return bulkOperationContext.callback(ReactiveBeforeConvertCallback.class, value, collectionName); + } + + 
private Mono maybeInvokeBeforeSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveBeforeSaveCallback.class, value, mappedDocument, collectionName); + } + + private Mono maybeInvokeAfterSaveCallback(Object value, Document mappedDocument) { + return bulkOperationContext.callback(ReactiveAfterSaveCallback.class, value, mappedDocument, collectionName); + } + + /** + * {@link ReactiveBulkOperationContext} holds information about {@link BulkMode} the entity in use as well as + * references to {@link QueryMapper} and {@link UpdateMapper}. + * + * @author Christoph Strobl + * @since 2.0 + */ + record ReactiveBulkOperationContext(BulkMode bulkMode, Optional> entity, + QueryMapper queryMapper, UpdateMapper updateMapper, @Nullable ApplicationEventPublisher eventPublisher, + @Nullable ReactiveEntityCallbacks entityCallbacks) { + + public boolean skipEntityCallbacks() { + return entityCallbacks == null; + } + + public boolean skipEventPublishing() { + return eventPublisher == null; + } + + @SuppressWarnings({ "rawtypes", "NullAway" }) + public Mono callback(Class callbackType, T entity, String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, collectionName); + } + + @SuppressWarnings({ "rawtypes", "NullAway" }) + public Mono callback(Class callbackType, T entity, Document document, + String collectionName) { + + if (skipEntityCallbacks()) { + return Mono.just(entity); + } + + return entityCallbacks.callback(callbackType, entity, document, collectionName); + } + + @SuppressWarnings("NullAway") + public void publishEvent(ApplicationEvent event) { + + if (skipEventPublishing()) { + return; + } + + eventPublisher.publishEvent(event); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java index a2d027d162..69ade2e163 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultReactiveIndexOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,16 +19,17 @@ import reactor.core.publisher.Mono; import java.util.Collection; -import java.util.Optional; import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.core.convert.QueryMapper; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.NumberUtils; import com.mongodb.client.model.IndexOptions; @@ -46,7 +47,7 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations { private final ReactiveMongoOperations mongoOperations; private final String collectionName; private final QueryMapper queryMapper; - private final Optional> type; + private final @Nullable Class type; 
/** * Creates a new {@link DefaultReactiveIndexOperations}. @@ -57,7 +58,7 @@ public class DefaultReactiveIndexOperations implements ReactiveIndexOperations { */ public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, QueryMapper queryMapper) { - this(mongoOperations, collectionName, queryMapper, Optional.empty()); + this(mongoOperations, collectionName, queryMapper, null); } /** @@ -69,16 +70,11 @@ public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, S * @param type used for mapping potential partial index filter expression, must not be {@literal null}. */ public DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, - QueryMapper queryMapper, Class type) { - this(mongoOperations, collectionName, queryMapper, Optional.of(type)); - } - - private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, String collectionName, - QueryMapper queryMapper, Optional> type) { + QueryMapper queryMapper, @Nullable Class type) { - Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null!"); - Assert.notNull(collectionName, "Collection must not be null!"); - Assert.notNull(queryMapper, "QueryMapper must not be null!"); + Assert.notNull(mongoOperations, "ReactiveMongoOperations must not be null"); + Assert.notNull(collectionName, "Collection must not be null"); + Assert.notNull(queryMapper, "QueryMapper must not be null"); this.mongoOperations = mongoOperations; this.collectionName = collectionName; @@ -86,37 +82,42 @@ private DefaultReactiveIndexOperations(ReactiveMongoOperations mongoOperations, this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#ensureIndex(org.springframework.data.mongodb.core.index.IndexDefinition) - */ - public Mono ensureIndex(final IndexDefinition indexDefinition) { + @Override + @SuppressWarnings("NullAway") + public Mono 
ensureIndex(IndexDefinition indexDefinition) { return mongoOperations.execute(collectionName, collection -> { - Document indexOptions = indexDefinition.getIndexOptions(); - - IndexOptions ops = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); - - if (indexOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { - - Assert.isInstanceOf(Document.class, indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + MongoPersistentEntity entity = getConfiguredEntity(); - MongoPersistentEntity entity = type - .map(val -> (MongoPersistentEntity) queryMapper.getMappingContext().getRequiredPersistentEntity(val)) - .orElseGet(() -> lookupPersistentEntity(collectionName)); + IndexOptions indexOptions = IndexConverters.indexDefinitionToIndexOptionsConverter().convert(indexDefinition); - ops = ops.partialFilterExpression( - queryMapper.getMappedObject(indexOptions.get(PARTIAL_FILTER_EXPRESSION_KEY, Document.class), entity)); - } + indexOptions = addPartialFilterIfPresent(indexOptions, indexDefinition.getIndexOptions(), entity); + indexOptions = addDefaultCollationIfRequired(indexOptions, entity); - return collection.createIndex(indexDefinition.getIndexKeys(), ops); + return collection.createIndex(indexDefinition.getIndexKeys(), indexOptions); }).next(); } - @Nullable - private MongoPersistentEntity lookupPersistentEntity(String collection) { + @Override + public Mono alterIndex(String name, org.springframework.data.mongodb.core.index.IndexOptions options) { + + return mongoOperations.execute(db -> { + Document indexOptions = new Document("name", name); + indexOptions.putAll(options.toDocument()); + + return Flux.from(db.runCommand(new Document("collMod", collectionName).append("index", indexOptions))) + .doOnNext(result -> { + if (NumberUtils.convertNumberToTargetClass(result.get("ok", (Number) 0), Integer.class) != 1) { + throw new UncategorizedMongoDbException( + "Index '%s' could not be modified. 
Response was %s".formatted(name, result.toJson()), null); + } + }); + }).then(); + } + + private @Nullable MongoPersistentEntity lookupPersistentEntity(String collection) { Collection> entities = queryMapper.getMappingContext().getPersistentEntities(); @@ -126,26 +127,51 @@ private MongoPersistentEntity lookupPersistentEntity(String collection) { .orElse(null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropIndex(java.lang.String) - */ - public Mono dropIndex(final String name) { + @Override + public Mono dropIndex(String name) { return mongoOperations.execute(collectionName, collection -> collection.dropIndex(name)).then(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#dropAllIndexes() - */ + @Override public Mono dropAllIndexes() { return dropIndex("*"); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.index.ReactiveIndexOperations#getIndexInfo() - */ + @Override public Flux getIndexInfo() { return mongoOperations.execute(collectionName, collection -> collection.listIndexes(Document.class)) // .map(IndexConverters.documentToIndexInfoConverter()::convert); } + + private @Nullable MongoPersistentEntity getConfiguredEntity() { + + if (type != null) { + return queryMapper.getMappingContext().getRequiredPersistentEntity(type); + } + return lookupPersistentEntity(collectionName); + } + + private IndexOptions addPartialFilterIfPresent(IndexOptions ops, Document sourceOptions, + @Nullable MongoPersistentEntity entity) { + + if (!sourceOptions.containsKey(PARTIAL_FILTER_EXPRESSION_KEY)) { + return ops; + } + + Assert.isInstanceOf(Document.class, sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY)); + return ops.partialFilterExpression( + queryMapper.getMappedObject((Document) sourceOptions.get(PARTIAL_FILTER_EXPRESSION_KEY), entity)); + } + + @SuppressWarnings("NullAway") + private static IndexOptions addDefaultCollationIfRequired(IndexOptions ops, + 
@Nullable MongoPersistentEntity entity) { + + if (ops.getCollation() != null || entity == null || !entity.hasCollation()) { + return ops; + } + + return ops.collation(entity.getCollation().toMongoCollation()); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java index 1792eca4f2..6dde79e0e8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultScriptOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,9 @@ */ package org.springframework.data.mongodb.core; -import static java.util.UUID.*; -import static org.springframework.data.mongodb.core.query.Criteria.*; -import static org.springframework.data.mongodb.core.query.Query.*; +import static java.util.UUID.randomUUID; +import static org.springframework.data.mongodb.core.query.Criteria.where; +import static org.springframework.data.mongodb.core.query.Query.query; import java.util.ArrayList; import java.util.Arrays; @@ -28,27 +28,29 @@ import org.bson.Document; import org.bson.types.ObjectId; -import org.springframework.dao.DataAccessException; +import org.jspecify.annotations.NullUnmarked; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.mapping.FieldName; 
import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; import com.mongodb.BasicDBList; -import com.mongodb.MongoException; -import com.mongodb.client.MongoDatabase; /** - * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ServerSideJavaScript}. + * Default implementation of {@link ScriptOperations} capable of saving and executing {@link ExecutableMongoScript}. * * @author Christoph Strobl * @author Oliver Gierke * @author Mark Paluch * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed in MongoDB Server 4.2.0. */ +@Deprecated +@NullUnmarked class DefaultScriptOperations implements ScriptOperations { private static final String SCRIPT_COLLECTION_NAME = "system.js"; @@ -63,92 +65,59 @@ class DefaultScriptOperations implements ScriptOperations { */ public DefaultScriptOperations(MongoOperations mongoOperations) { - Assert.notNull(mongoOperations, "MongoOperations must not be null!"); + Assert.notNull(mongoOperations, "MongoOperations must not be null"); this.mongoOperations = mongoOperations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.ExecutableMongoScript) - */ @Override public NamedMongoScript register(ExecutableMongoScript script) { return register(new NamedMongoScript(generateScriptName(), script)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#register(org.springframework.data.mongodb.core.script.NamedMongoScript) - */ @Override public NamedMongoScript register(NamedMongoScript script) { - Assert.notNull(script, "Script must not be null!"); + 
Assert.notNull(script, "Script must not be null"); mongoOperations.save(script, SCRIPT_COLLECTION_NAME); return script; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#execute(org.springframework.data.mongodb.core.script.ExecutableMongoScript, java.lang.Object[]) - */ @Override - public Object execute(final ExecutableMongoScript script, final Object... args) { + public @Nullable Object execute(ExecutableMongoScript script, Object... args) { - Assert.notNull(script, "Script must not be null!"); + Assert.notNull(script, "Script must not be null"); - return mongoOperations.execute(new DbCallback() { + return mongoOperations.execute(db -> { - @Override - public Object doInDB(MongoDatabase db) throws MongoException, DataAccessException { - - Document command = new Document("$eval", script.getCode()); - BasicDBList commandArgs = new BasicDBList(); - commandArgs.addAll(Arrays.asList(convertScriptArgs(false, args))); - command.append("args", commandArgs); - return db.runCommand(command).get("retval"); - } + Document command = new Document("$eval", script.getCode()); + BasicDBList commandArgs = new BasicDBList(); + commandArgs.addAll(Arrays.asList(convertScriptArgs(false, args))); + command.append("args", commandArgs); + return db.runCommand(command).get("retval"); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#call(java.lang.String, java.lang.Object[]) - */ @Override - public Object call(final String scriptName, final Object... args) { + public @Nullable Object call(String scriptName, Object... 
args) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); - return mongoOperations.execute(new DbCallback() { - - @Override - public Object doInDB(MongoDatabase db) throws MongoException, DataAccessException { - - return db.runCommand(new Document("eval", String.format("%s(%s)", scriptName, convertAndJoinScriptArgs(args)))) - .get("retval"); - } - }); + return mongoOperations.execute( + db -> db.runCommand(new Document("eval", String.format("%s(%s)", scriptName, convertAndJoinScriptArgs(args)))) + .get("retval")); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#exists(java.lang.String) - */ @Override public boolean exists(String scriptName) { - Assert.hasText(scriptName, "ScriptName must not be null or empty!"); + Assert.hasText(scriptName, "ScriptName must not be null or empty"); - return mongoOperations.exists(query(where("_id").is(scriptName)), NamedMongoScript.class, SCRIPT_COLLECTION_NAME); + return mongoOperations.exists(query(where(FieldName.ID.name()).is(scriptName)), NamedMongoScript.class, + SCRIPT_COLLECTION_NAME); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ScriptOperations#getScriptNames() - */ @Override public Set getScriptNames() { @@ -173,7 +142,7 @@ private Object[] convertScriptArgs(boolean quote, Object... args) { return args; } - List convertedValues = new ArrayList(args.length); + List convertedValues = new ArrayList<>(args.length); for (Object arg : args) { convertedValues.add(arg instanceof String && quote ? 
String.format("'%s'", arg) diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java index 016a3b0a28..c445e06f8a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DefaultWriteConcernResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,8 @@ */ package org.springframework.data.mongodb.core; +import org.jspecify.annotations.Nullable; + import com.mongodb.WriteConcern; /** @@ -26,7 +28,7 @@ enum DefaultWriteConcernResolver implements WriteConcernResolver { INSTANCE; - public WriteConcern resolve(MongoAction action) { + public @Nullable WriteConcern resolve(MongoAction action) { return action.getDefaultWriteConcern(); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java index 7d07ab775d..54f85051fb 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/DocumentCallbackHandler.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or 
authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -22,8 +22,8 @@ /** * An interface used by {@link MongoTemplate} for processing documents returned from a MongoDB query on a per-document - * basis. Implementations of this interface perform the actual work of prcoessing each document but don't need to worry - * about exception handling. {@MongoException}s will be caught and translated by the calling MongoTemplate An + * basis. Implementations of this interface perform the actual work of processing each document but don't need to worry + * about exception handling. {@link MongoException}s will be caught and translated by the calling MongoTemplate An * DocumentCallbackHandler is typically stateful: It keeps the result state within the object, to be available later for * later inspection. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java new file mode 100644 index 0000000000..601b6898b8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EncryptionAlgorithms.java @@ -0,0 +1,31 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +/** + * Encryption algorithms supported by MongoDB Client Side Field Level Encryption. + * + * @author Christoph Strobl + * @author Ross Lawley + * @since 3.3 + */ +public final class EncryptionAlgorithms { + + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"; + public static final String AEAD_AES_256_CBC_HMAC_SHA_512_Random = "AEAD_AES_256_CBC_HMAC_SHA_512-Random"; + public static final String RANGE = "Range"; + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java new file mode 100644 index 0000000000..ad3c2b8564 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityLifecycleEventDelegate.java @@ -0,0 +1,61 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.jspecify.annotations.Nullable; +import org.springframework.context.ApplicationEventPublisher; + +/** + * Delegate class to encapsulate lifecycle event configuration and publishing. + * + * @author Mark Paluch + * @since 4.0 + * @see ApplicationEventPublisher + */ +class EntityLifecycleEventDelegate { + + private @Nullable ApplicationEventPublisher publisher; + private boolean eventsEnabled = true; + + public void setPublisher(@Nullable ApplicationEventPublisher publisher) { + this.publisher = publisher; + } + + public boolean isEventsEnabled() { + return eventsEnabled; + } + + public void setEventsEnabled(boolean eventsEnabled) { + this.eventsEnabled = eventsEnabled; + } + + /** + * Publish an application event if event publishing is enabled. + * + * @param event the application event. + */ + @SuppressWarnings("NullAway") + public void publishEvent(Object event) { + + if (canPublishEvent()) { + publisher.publishEvent(event); + } + } + + private boolean canPublishEvent() { + return publisher != null && eventsEnabled; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java index a9b1d19d8f..1327656356 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,114 +15,202 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - +import java.time.Duration; import java.util.Collection; +import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; +import org.bson.BsonNull; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.core.convert.ConversionService; +import org.springframework.core.env.Environment; +import org.springframework.core.env.EnvironmentCapable; +import org.springframework.core.env.StandardEnvironment; import org.springframework.dao.InvalidDataAccessApiUsageException; +import org.springframework.data.convert.CustomConversions; +import org.springframework.data.expression.ValueEvaluationContext; import org.springframework.data.mapping.IdentifierAccessor; import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.PersistentEntity; import org.springframework.data.mapping.PersistentPropertyAccessor; +import org.springframework.data.mapping.PersistentPropertyPath; +import org.springframework.data.mapping.PropertyPath; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.model.ConvertingPropertyAccessor; +import org.springframework.data.mongodb.core.CollectionOptions.EncryptedFieldsOptions; +import org.springframework.data.mongodb.core.CollectionOptions.TimeSeriesOptions; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import 
org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.mapping.BasicMongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.TimeSeries; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.timeseries.Granularity; +import org.springframework.data.mongodb.core.validation.Validator; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.mongodb.util.DurationUtil; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.projection.EntityProjectionIntrospector; +import org.springframework.data.projection.ProjectionFactory; +import org.springframework.data.projection.TargetAware; +import org.springframework.data.util.Optionals; +import org.springframework.expression.spel.support.SimpleEvaluationContext; import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; -import com.mongodb.util.JSONParseException; +import com.mongodb.client.model.ChangeStreamPreAndPostImagesOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import 
com.mongodb.client.model.TimeSeriesGranularity; +import com.mongodb.client.model.ValidationOptions; /** * Common operations performed on an entity in the context of it's mapping metadata. - * + * * @author Oliver Gierke + * @author Mark Paluch + * @author Christoph Strobl + * @author Ben Foster + * @author Ross Lawley * @since 2.1 * @see MongoTemplate * @see ReactiveMongoTemplate */ -@RequiredArgsConstructor class EntityOperations { - private static final String ID_FIELD = "_id"; + private static final String ID_FIELD = FieldName.ID.name(); + + private final MappingContext, MongoPersistentProperty> context; + private final QueryMapper queryMapper; - private final @NonNull MappingContext, MongoPersistentProperty> context; + private final EntityProjectionIntrospector introspector; + + private final MongoJsonSchemaMapper schemaMapper; + + private @Nullable Environment environment; + + EntityOperations(MongoConverter converter) { + this(converter, new QueryMapper(converter)); + } + + EntityOperations(MongoConverter converter, QueryMapper queryMapper) { + this(converter, converter.getMappingContext(), converter.getCustomConversions(), converter.getProjectionFactory(), + queryMapper); + } + + EntityOperations(MongoConverter converter, + MappingContext, MongoPersistentProperty> context, + CustomConversions conversions, ProjectionFactory projectionFactory, QueryMapper queryMapper) { + this.context = context; + this.queryMapper = queryMapper; + this.introspector = EntityProjectionIntrospector.create(projectionFactory, + EntityProjectionIntrospector.ProjectionPredicate.typeHierarchy() + .and(((target, underlyingType) -> !conversions.isSimpleType(target))), + context); + this.schemaMapper = new MongoJsonSchemaMapper(converter); + if (converter instanceof EnvironmentCapable environmentCapable) { + this.environment = environmentCapable.getEnvironment(); + } + } /** * Creates a new {@link Entity} for the given bean. - * + * * @param entity must not be {@literal null}. 
- * @return + * @return new instance of {@link Entity}. */ @SuppressWarnings({ "unchecked", "rawtypes" }) - public Entity forEntity(T entity) { + Entity forEntity(T entity) { - Assert.notNull(entity, "Bean must not be null!"); + Assert.notNull(entity, "Bean must not be null"); + + if (entity instanceof TargetAware targetAware) { + return new SimpleMappedEntity((Map) targetAware.getTarget(), this); + } if (entity instanceof String) { - return new SimpleEntity(parse(entity.toString())); + return new UnmappedEntity(parse(entity.toString()), this); } if (entity instanceof Map) { - return new SimpleEntity((Map) entity); + return new SimpleMappedEntity((Map) entity, this); } - return MappedEntity.of(entity, context); + return MappedEntity.of(entity, context, this); } /** * Creates a new {@link AdaptibleEntity} for the given bean and {@link ConversionService}. - * + * * @param entity must not be {@literal null}. * @param conversionService must not be {@literal null}. - * @return + * @return new instance of {@link AdaptibleEntity}. 
*/ @SuppressWarnings({ "unchecked", "rawtypes" }) - public AdaptibleEntity forEntity(T entity, ConversionService conversionService) { + AdaptibleEntity forEntity(T entity, ConversionService conversionService) { - Assert.notNull(entity, "Bean must not be null!"); - Assert.notNull(conversionService, "ConversionService must not be null!"); + Assert.notNull(entity, "Bean must not be null"); + Assert.notNull(conversionService, "ConversionService must not be null"); if (entity instanceof String) { - return new SimpleEntity(parse(entity.toString())); + return new UnmappedEntity(parse(entity.toString()), this); } if (entity instanceof Map) { - return new SimpleEntity((Map) entity); + return new SimpleMappedEntity((Map) entity, this); } - return AdaptibleMappedEntity.of(entity, context, conversionService); + return AdaptibleMappedEntity.of(entity, context, conversionService, this); } - public String determineCollectionName(@Nullable Class entityClass) { + /** + * @param source can be {@literal null}. + * @return {@literal true} if the given value is an {@literal array}, {@link Collection} or {@link Iterator}. + * @since 3.2 + */ + static boolean isCollectionLike(@Nullable Object source) { - if (entityClass == null) { - throw new InvalidDataAccessApiUsageException( - "No class parameter provided, entity collection can't be determined!"); + if (source == null) { + return false; } - return context.getRequiredPersistentEntity(entityClass).getCollection(); + return ObjectUtils.isArray(source) || source instanceof Collection || source instanceof Iterator; } /** - * Returns the collection name to be used for the given entity. - * - * @param obj can be {@literal null}. - * @return + * @param entityClass should not be null. + * @return the {@link MongoPersistentEntity#getCollection() collection name}. */ - @Nullable - public String determineEntityCollectionName(@Nullable Object obj) { - return null == obj ? 
null : determineCollectionName(obj.getClass()); + public String determineCollectionName(@Nullable Class entityClass) { + + if (entityClass == null) { + throw new InvalidDataAccessApiUsageException( + "No class parameter provided, entity collection can't be determined"); + } + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(entityClass); + + if (persistentEntity == null) { + throw new MappingException(String.format( + "Cannot determine collection name from type '%s'. Is it a store native type?", entityClass.getName())); + } + + return persistentEntity.getCollection(); } public Query getByIdInQuery(Collection entities) { @@ -143,13 +231,13 @@ public Query getByIdInQuery(Collection entities) { /** * Returns the name of the identifier property. Considers mapping information but falls back to the MongoDB default of * {@code _id} if no identifier property can be found. - * + * * @param type must not be {@literal null}. - * @return + * @return never {@literal null}. */ public String getIdPropertyName(Class type) { - Assert.notNull(type, "Type must not be null!"); + Assert.notNull(type, "Type must not be null"); MongoPersistentEntity persistentEntity = context.getPersistentEntity(type); @@ -160,54 +248,222 @@ public String getIdPropertyName(Class type) { return ID_FIELD; } + /** + * Return the name used for {@code $geoNear.distanceField} avoiding clashes with potentially existing properties. + * + * @param domainType must not be {@literal null}. + * @return the name of the distanceField to use. {@literal dis} by default. 
+ * @since 2.2 + */ + public String nearQueryDistanceFieldName(Class domainType) { + + MongoPersistentEntity persistentEntity = context.getPersistentEntity(domainType); + if (persistentEntity == null || persistentEntity.getPersistentProperty("dis") == null) { + return "dis"; + } + + String distanceFieldName = "calculated-distance"; + int counter = 0; + while (persistentEntity.getPersistentProperty(distanceFieldName) != null) { + distanceFieldName += "-" + (counter++); + } + + return distanceFieldName; + } + private static Document parse(String source) { try { return Document.parse(source); - } catch (JSONParseException | org.bson.json.JsonParseException o_O) { - throw new MappingException("Could not parse given String to save into a JSON document!", o_O); + } catch (org.bson.json.JsonParseException o_O) { + throw new MappingException("Could not parse given String to save into a JSON document", o_O); + } catch (RuntimeException o_O) { + + // legacy 3.x exception + if (ClassUtils.matchesTypeName(o_O.getClass(), "JSONParseException")) { + throw new MappingException("Could not parse given String to save into a JSON document", o_O); + } + throw o_O; + } + } + + public TypedOperations forType(@Nullable Class entityClass) { + + if (entityClass != null) { + + MongoPersistentEntity entity = context.getPersistentEntity(entityClass); + + if (entity != null) { + return new TypedEntityOperations(entity, environment); + } + + } + return UntypedOperations.instance(); + } + + /** + * Introspect the given {@link Class result type} in the context of the {@link Class entity type} whether the returned + * type is a projection and what property paths are participating in the projection. + * + * @param resultType the type to project on. Must not be {@literal null}. + * @param entityType the source domain type. Must not be {@literal null}. + * @return the introspection result. 
+ * @since 3.4 + * @see EntityProjectionIntrospector#introspect(Class, Class) + */ + public EntityProjection introspectProjection(Class resultType, Class entityType) { + + MongoPersistentEntity persistentEntity = queryMapper.getMappingContext().getPersistentEntity(entityType); + if (persistentEntity == null && !resultType.isInterface() || ClassUtils.isAssignable(Document.class, resultType)) { + return (EntityProjection) EntityProjection.nonProjecting(resultType); } + return introspector.introspect(resultType, entityType); + } + + /** + * Convert {@link CollectionOptions} to {@link CreateCollectionOptions} using {@link Class entityType} to obtain + * mapping metadata. + * + * @param collectionOptions + * @param entityType + * @return + * @since 3.4 + */ + public CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, + Class entityType) { + + Optional collation = Optionals.firstNonEmpty( + () -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), + () -> forType(entityType).getCollation());// + + CreateCollectionOptions result = new CreateCollectionOptions(); + collation.map(Collation::toMongoCollation).ifPresent(result::collation); + + if (collectionOptions == null) { + return result; + } + + collectionOptions.getCapped().ifPresent(result::capped); + collectionOptions.getSize().ifPresent(result::sizeInBytes); + collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments); + collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation); + + collectionOptions.getValidationOptions().ifPresent(it -> { + + ValidationOptions validationOptions = new ValidationOptions(); + + it.getValidationAction().ifPresent(validationOptions::validationAction); + it.getValidationLevel().ifPresent(validationOptions::validationLevel); + + it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType))); + + 
result.validationOptions(validationOptions); + }); + + collectionOptions.getTimeSeriesOptions().map(forType(entityType)::mapTimeSeriesOptions).ifPresent(it -> { + + com.mongodb.client.model.TimeSeriesOptions options = new com.mongodb.client.model.TimeSeriesOptions( + it.getTimeField()); + + if (StringUtils.hasText(it.getMetaField())) { + options.metaField(it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + options.granularity(TimeSeriesGranularity.valueOf(it.getGranularity().name().toUpperCase())); + } + + if (!it.getExpireAfter().isNegative()) { + result.expireAfter(it.getExpireAfter().toSeconds(), TimeUnit.SECONDS); + } + + result.timeSeriesOptions(options); + }); + + collectionOptions.getChangeStreamOptions() // + .map(CollectionOptions.CollectionChangeStreamOptions::getPreAndPostImages) // + .map(ChangeStreamPreAndPostImagesOptions::new) // + .ifPresent(result::changeStreamPreAndPostImagesOptions); + + collectionOptions.getEncryptedFieldsOptions() // + .map(EncryptedFieldsOptions::toDocument) // + .filter(Predicate.not(Document::isEmpty)) // + .ifPresent(result::encryptedFields); + + return result; + } + + private Document getMappedValidator(Validator validator, Class domainType) { + + Document validationRules = validator.toDocument(); + + if (validationRules.containsKey("$jsonSchema")) { + return schemaMapper.mapSchema(validationRules, domainType); + } + + return queryMapper.getMappedObject(validationRules, context.getPersistentEntity(domainType)); } /** * A representation of information about an entity. * * @author Oliver Gierke + * @author Christoph Strobl * @since 2.1 */ interface Entity { /** * Returns the field name of the identifier of the entity. - * + * * @return */ String getIdFieldName(); /** * Returns the identifier of the entity. - * + * * @return */ + @Nullable Object getId(); + /** + * Returns the property value for {@code key}. 
+ * + * @param key + * @return + * @since 4.1 + */ + @Nullable + Object getPropertyValue(String key); + /** * Returns the {@link Query} to find the entity by its identifier. - * + * * @return */ Query getByIdQuery(); + /** + * Returns the {@link Query} to remove an entity by its {@literal id} and if applicable {@literal version}. + * + * @return the {@link Query} to use for removing the entity. Never {@literal null}. + * @since 2.2 + */ + default Query getRemoveByQuery() { + return isVersionedEntity() ? getQueryForVersion() : getByIdQuery(); + } + /** * Returns the {@link Query} to find the entity in its current version. - * + * * @return */ Query getQueryForVersion(); /** * Maps the backing entity into a {@link MappedDocument} using the given {@link MongoWriter}. - * + * * @param writer must not be {@literal null}. * @return */ @@ -220,7 +476,7 @@ default void assertUpdateableIdIfNotSet() {} /** * Returns whether the entity is versioned, i.e. if it contains a version property. - * + * * @return */ default boolean isVersionedEntity() { @@ -228,19 +484,38 @@ default boolean isVersionedEntity() { } /** - * Returns the value of the version if the entity has a version property, {@literal null} otherwise. - * - * @return + * Returns the value of the version if the entity {@link #isVersionedEntity() has a version property}. + * + * @return the entity version. Can be {@literal null}. + * @throws IllegalStateException if the entity does not define a {@literal version} property. Make sure to check + * {@link #isVersionedEntity()}. */ @Nullable Object getVersion(); /** * Returns the underlying bean. - * + * * @return */ T getBean(); + + /** + * Returns whether the entity is considered to be new. + * + * @return + * @since 2.1.2 + */ + boolean isNew(); + + /** + * @param sortObject + * @return + * @since 4.1 + * @throws IllegalStateException if a sort key yields {@literal null}. 
+ */ + Map extractKeys(Document sortObject, Class sourceType); + } /** @@ -254,74 +529,66 @@ interface AdaptibleEntity extends Entity { /** * Populates the identifier of the backing entity if it has an identifier property and there's no identifier * currently present. - * - * @param id must not be {@literal null}. + * + * @param id can be {@literal null}. * @return */ - @Nullable T populateIdIfNecessary(@Nullable Object id); /** - * Initializes the version property of the of the current entity if available. - * + * Initializes the version property of the current entity if available. + * * @return the entity with the version property updated if available. */ T initializeVersionProperty(); /** * Increments the value of the version property if available. - * + * * @return the entity with the version property incremented if available. */ T incrementVersion(); /** * Returns the current version value if the entity has a version property. - * - * @return the current version or {@literal null} in case it's uninitialized or the entity doesn't expose a version - * property. + * + * @return the current version or {@literal null} in case it's uninitialized. + * @throws IllegalStateException if the entity does not define a {@literal version} property. 
*/ @Nullable Number getVersion(); } - @RequiredArgsConstructor - private static class SimpleEntity> implements AdaptibleEntity { + private static class UnmappedEntity> implements AdaptibleEntity { private final T map; + private final EntityOperations entityOperations; + + protected UnmappedEntity(T map, EntityOperations entityOperations) { + this.map = map; + this.entityOperations = entityOperations; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName() - */ @Override public String getIdFieldName() { return ID_FIELD; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId() - */ @Override - public Object getId() { - return map.get(ID_FIELD); + public @Nullable Object getId() { + return getPropertyValue(ID_FIELD); + } + + @Override + public @Nullable Object getPropertyValue(String key) { + return map.get(key); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery() - */ @Override public Query getByIdQuery() { return Query.query(Criteria.where(ID_FIELD).is(map.get(ID_FIELD))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#populateIdIfNecessary(java.lang.Object) - */ - @Nullable @Override public T populateIdIfNecessary(@Nullable Object id) { @@ -330,108 +597,150 @@ public T populateIdIfNecessary(@Nullable Object id) { return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion() - */ @Override public Query getQueryForVersion() { - throw new MappingException("Cannot query for version on plain Documents!"); + throw new MappingException("Cannot query for version on plain Documents"); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter) - */ @Override public MappedDocument toMappedDocument(MongoWriter writer) { - return MappedDocument.of(map instanceof Document // - ? (Document) map // + return MappedDocument.of(map instanceof Document document // + ? document // : new Document(map)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#initializeVersionProperty() - */ @Override public T initializeVersionProperty() { return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#getVersion() - */ @Override - @Nullable - public Number getVersion() { + public @Nullable Number getVersion() { return null; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MutablePersistableSource#incrementVersion() - */ @Override public T incrementVersion() { return map; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean() - */ @Override public T getBean() { return map; } + + @Override + public boolean isNew() { + return map.get(ID_FIELD) != null; + } + + @Override + public Map extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(ID_FIELD, getId()); + } + + for (String key : sortObject.keySet()) { + + Object value = resolveValue(key, sourceEntity); + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + @Nullable 
+ private Object resolveValue(String key, @Nullable MongoPersistentEntity sourceEntity) { + + if (sourceEntity == null) { + return BsonUtils.resolveValue(map, key); + } + PropertyPath from = PropertyPath.from(key, sourceEntity.getTypeInformation()); + PersistentPropertyPath persistentPropertyPath = entityOperations.context + .getPersistentPropertyPath(from); + return BsonUtils.resolveValue(map, persistentPropertyPath.toDotPath(MongoPersistentProperty::getFieldName)); + } + } + + private static class SimpleMappedEntity> extends UnmappedEntity { + + protected SimpleMappedEntity(T map, EntityOperations entityOperations) { + super(map, entityOperations); + } + + @Override + @SuppressWarnings("unchecked") + public MappedDocument toMappedDocument(MongoWriter writer) { + + T bean = getBean(); + bean = (T) (bean instanceof Document document// + ? document // + : new Document(bean)); + Document document = new Document(); + writer.write(bean, document); + + return MappedDocument.of(document); + } } - @RequiredArgsConstructor(access = AccessLevel.PROTECTED) private static class MappedEntity implements Entity { - private final @NonNull MongoPersistentEntity entity; - private final @NonNull IdentifierAccessor idAccessor; - private final @NonNull PersistentPropertyAccessor propertyAccessor; + private final MongoPersistentEntity entity; + private final IdentifierAccessor idAccessor; + private final PersistentPropertyAccessor propertyAccessor; + private final EntityOperations entityOperations; + + protected MappedEntity(MongoPersistentEntity entity, IdentifierAccessor idAccessor, + PersistentPropertyAccessor propertyAccessor, EntityOperations entityOperations) { + + this.entity = entity; + this.idAccessor = idAccessor; + this.propertyAccessor = propertyAccessor; + this.entityOperations = entityOperations; + } private static MappedEntity of(T bean, - MappingContext, MongoPersistentProperty> context) { + MappingContext, MongoPersistentProperty> context, + EntityOperations 
entityOperations) { MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); - return new MappedEntity<>(entity, identifierAccessor, propertyAccessor); + return new MappedEntity<>(entity, identifierAccessor, propertyAccessor, entityOperations); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getIdPropertyName() - */ @Override public String getIdFieldName() { return entity.getRequiredIdProperty().getFieldName(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getId() - */ @Override public Object getId() { return idAccessor.getRequiredIdentifier(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getByIdQuery() - */ + @Override + public @Nullable Object getPropertyValue(String key) { + return propertyAccessor.getProperty(entity.getRequiredPersistentProperty(key)); + } + @Override public Query getByIdQuery() { if (!entity.hasIdProperty()) { - throw new MappingException("No id property found for object of type " + entity.getType() + "!"); + throw new MappingException("No id property found for object of type " + entity.getType()); } MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); @@ -439,24 +748,16 @@ public Query getByIdQuery() { return Query.query(Criteria.where(idProperty.getName()).is(getId())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getQueryForVersion(java.lang.Object) - */ @Override public Query getQueryForVersion() { MongoPersistentProperty idProperty = entity.getRequiredIdProperty(); - MongoPersistentProperty property = entity.getRequiredVersionProperty(); + MongoPersistentProperty versionProperty = 
entity.getRequiredVersionProperty(); return new Query(Criteria.where(idProperty.getName()).is(getId())// - .and(property.getName()).is(getVersion())); + .and(versionProperty.getName()).is(getVersion())); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#toMappedDocument(org.springframework.data.mongodb.core.convert.MongoWriter) - */ @Override public MappedDocument toMappedDocument(MongoWriter writer) { @@ -472,10 +773,6 @@ public MappedDocument toMappedDocument(MongoWriter writer) { return MappedDocument.of(document); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.Entity#assertUpdateableIdIfNotSet() - */ public void assertUpdateableIdIfNotSet() { if (!entity.hasIdProperty()) { @@ -491,38 +788,87 @@ public void assertUpdateableIdIfNotSet() { if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { throw new InvalidDataAccessApiUsageException( - String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(), + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), entity.getType().getName())); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#isVersionedEntity() - */ @Override public boolean isVersionedEntity() { return entity.hasVersionProperty(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getVersion() - */ @Override - @Nullable - public Object getVersion() { + public @Nullable Object getVersion() { return propertyAccessor.getProperty(entity.getRequiredVersionProperty()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.PersistableSource#getBean() - */ @Override public T getBean() { return propertyAccessor.getBean(); } + + @Override + public boolean isNew() { + return 
entity.isNew(propertyAccessor.getBean()); + } + + @Override + public Map extractKeys(Document sortObject, Class sourceType) { + + Map keyset = new LinkedHashMap<>(); + MongoPersistentEntity sourceEntity = entityOperations.context.getPersistentEntity(sourceType); + if (sourceEntity != null && sourceEntity.hasIdProperty()) { + keyset.put(sourceEntity.getRequiredIdProperty().getName(), getId()); + } else { + keyset.put(entity.getRequiredIdProperty().getName(), getId()); + } + + for (String key : sortObject.keySet()) { + + Object value; + if (key.indexOf('.') != -1) { + + // follow the path across nested levels. + // TODO: We should have a MongoDB-specific property path abstraction to allow diving into Document. + value = getNestedPropertyValue(key); + } else { + value = getPropertyValue(key); + } + + if (value == null) { + throw new IllegalStateException( + String.format("Cannot extract value for key %s because its value is null", key)); + } + + keyset.put(key, value); + } + + return keyset; + } + + private Object getNestedPropertyValue(String key) { + + String[] segments = key.split("\\."); + Entity currentEntity = this; + Object currentValue = BsonNull.VALUE; + + for (int i = 0; i < segments.length; i++) { + + String segment = segments[i]; + currentValue = currentEntity.getPropertyValue(segment); + + if (i < segments.length - 1) { + if (currentValue == null) { + return BsonNull.VALUE; + } + + currentEntity = entityOperations.forEntity(currentValue); + } + } + + return currentValue != null ? 
currentValue : BsonNull.VALUE; + } } private static class AdaptibleMappedEntity extends MappedEntity implements AdaptibleEntity { @@ -532,9 +878,9 @@ private static class AdaptibleMappedEntity extends MappedEntity implements private final IdentifierAccessor identifierAccessor; private AdaptibleMappedEntity(MongoPersistentEntity entity, IdentifierAccessor identifierAccessor, - ConvertingPropertyAccessor propertyAccessor) { + ConvertingPropertyAccessor propertyAccessor, EntityOperations entityOperations) { - super(entity, identifierAccessor, propertyAccessor); + super(entity, identifierAccessor, propertyAccessor, entityOperations); this.entity = entity; this.propertyAccessor = propertyAccessor; @@ -543,61 +889,44 @@ private AdaptibleMappedEntity(MongoPersistentEntity entity, IdentifierAccesso private static AdaptibleEntity of(T bean, MappingContext, MongoPersistentProperty> context, - ConversionService conversionService) { + ConversionService conversionService, EntityOperations entityOperations) { MongoPersistentEntity entity = context.getRequiredPersistentEntity(bean.getClass()); IdentifierAccessor identifierAccessor = entity.getIdentifierAccessor(bean); PersistentPropertyAccessor propertyAccessor = entity.getPropertyAccessor(bean); return new AdaptibleMappedEntity<>(entity, identifierAccessor, - new ConvertingPropertyAccessor<>(propertyAccessor, conversionService)); + new ConvertingPropertyAccessor<>(propertyAccessor, conversionService), entityOperations); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#populateIdIfNecessary(java.lang.Object) - */ - @Nullable @Override public T populateIdIfNecessary(@Nullable Object id) { if (id == null) { - return null; + return propertyAccessor.getBean(); } - T bean = propertyAccessor.getBean(); MongoPersistentProperty idProperty = entity.getIdProperty(); - if (idProperty == null) { - return bean; + return propertyAccessor.getBean(); } if (identifierAccessor.getIdentifier() 
!= null) { - return bean; + return propertyAccessor.getBean(); } propertyAccessor.setProperty(idProperty, id); - return propertyAccessor.getBean(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.MappedEntity#getVersion() - */ @Override - @Nullable - public Number getVersion() { + public @Nullable Number getVersion() { MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); return propertyAccessor.getProperty(versionProperty, Number.class); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#initializeVersionProperty() - */ @Override public T initializeVersionProperty() { @@ -605,15 +934,13 @@ public T initializeVersionProperty() { return propertyAccessor.getBean(); } - propertyAccessor.setProperty(entity.getRequiredVersionProperty(), 0); + MongoPersistentProperty versionProperty = entity.getRequiredVersionProperty(); + + propertyAccessor.setProperty(versionProperty, versionProperty.getType().isPrimitive() ? 1 : 0); return propertyAccessor.getBean(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity#incrementVersion() - */ @Override public T incrementVersion() { @@ -626,4 +953,224 @@ public T incrementVersion() { return propertyAccessor.getBean(); } } + + /** + * Type-specific operations abstraction. + * + * @author Mark Paluch + * @param + * @since 2.2 + */ + interface TypedOperations { + + /** + * Return the optional {@link Collation} for the underlying entity. + * + * @return + */ + Optional getCollation(); + + /** + * Return the optional {@link Collation} from the given {@link Query} and fall back to the collation configured for + * the underlying entity. + * + * @return + */ + Optional getCollation(Query query); + + /** + * Derive the applicable {@link CollectionOptions} for the given type. + * + * @return never {@literal null}. 
+ * @since 3.3 + */ + CollectionOptions getCollectionOptions(); + + /** + * Map the fields of a given {@link TimeSeriesOptions} against the target domain type to consider potentially + * annotated field names. + * + * @param options must not be {@literal null}. + * @return never {@literal null}. + * @since 3.3 + */ + TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options); + + /** + * @return the name of the id field. + * @since 4.1 + */ + default String getIdKeyName() { + return ID_FIELD; + } + } + + /** + * {@link TypedOperations} for generic entities that are not represented with {@link PersistentEntity} (e.g. custom + * conversions). + */ + enum UntypedOperations implements TypedOperations { + + INSTANCE; + + UntypedOperations() {} + + @SuppressWarnings({ "unchecked", "rawtypes" }) + public static TypedOperations instance() { + return (TypedOperations) INSTANCE; + } + + @Override + public Optional getCollation() { + return Optional.empty(); + } + + @Override + public Optional getCollation(Query query) { + + if (query == null) { + return Optional.empty(); + } + + return query.getCollation(); + } + + @Override + public CollectionOptions getCollectionOptions() { + return CollectionOptions.empty(); + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions options) { + return options; + } + } + + /** + * {@link TypedOperations} backed by {@link MongoPersistentEntity}. 
+ * + * @param + */ + static class TypedEntityOperations implements TypedOperations { + + private final MongoPersistentEntity entity; + + @Nullable private final Environment environment; + + protected TypedEntityOperations(MongoPersistentEntity entity, @Nullable Environment environment) { + + this.entity = entity; + this.environment = environment; + } + + @Override + public Optional getCollation() { + return Optional.ofNullable(entity.getCollation()); + } + + @Override + public Optional getCollation(Query query) { + + if (query.getCollation().isPresent()) { + return query.getCollation(); + } + + return Optional.ofNullable(entity.getCollation()); + } + + @Override + public CollectionOptions getCollectionOptions() { + + CollectionOptions collectionOptions = CollectionOptions.empty(); + if (entity.hasCollation()) { + collectionOptions = collectionOptions.collation(entity.getCollation()); + } + + if (entity.isAnnotationPresent(TimeSeries.class)) { + + TimeSeries timeSeries = entity.getRequiredAnnotation(TimeSeries.class); + + if (entity.getPersistentProperty(timeSeries.timeField()) == null) { + throw new MappingException(String.format("Time series field '%s' does not exist in type %s", + timeSeries.timeField(), entity.getName())); + } + + TimeSeriesOptions options = TimeSeriesOptions.timeSeries(timeSeries.timeField()); + if (StringUtils.hasText(timeSeries.metaField())) { + + if (entity.getPersistentProperty(timeSeries.metaField()) == null) { + throw new MappingException( + String.format("Meta field '%s' does not exist in type %s", timeSeries.metaField(), entity.getName())); + } + + options = options.metaField(timeSeries.metaField()); + } + if (!Granularity.DEFAULT.equals(timeSeries.granularity())) { + options = options.granularity(timeSeries.granularity()); + } + + if (StringUtils.hasText(timeSeries.expireAfter())) { + + Duration timeout = computeIndexTimeout(timeSeries.expireAfter(), getEvaluationContextForEntity(entity)); + if (!timeout.isNegative()) { + options = 
options.expireAfter(timeout); + } + } + + collectionOptions = collectionOptions.timeSeries(options); + } + + return collectionOptions; + } + + @Override + public TimeSeriesOptions mapTimeSeriesOptions(TimeSeriesOptions source) { + + TimeSeriesOptions target = TimeSeriesOptions.timeSeries(mappedNameOrDefault(source.getTimeField())); + + if (StringUtils.hasText(source.getMetaField())) { + target = target.metaField(mappedNameOrDefault(source.getMetaField())); + } + return target.granularity(source.getGranularity()).expireAfter(source.getExpireAfter()); + } + + @Override + public String getIdKeyName() { + return entity.getIdProperty() != null ? entity.getIdProperty().getName() : ID_FIELD; + } + + private String mappedNameOrDefault(String name) { + MongoPersistentProperty persistentProperty = entity.getPersistentProperty(name); + return persistentProperty != null ? persistentProperty.getFieldName() : name; + } + + /** + * Get the {@link ValueEvaluationContext} for a given {@link PersistentEntity entity} the default one. + * + * @param persistentEntity can be {@literal null} + * @return the context to use. + */ + private ValueEvaluationContext getEvaluationContextForEntity(@Nullable PersistentEntity persistentEntity) { + + if (persistentEntity instanceof BasicMongoPersistentEntity mongoEntity) { + return mongoEntity.getValueEvaluationContext(null); + } + + return ValueEvaluationContext.of(this.environment != null ? this.environment : new StandardEnvironment(), + SimpleEvaluationContext.forReadOnlyDataBinding().build()); + } + + /** + * Compute the index timeout value by evaluating a potential + * {@link org.springframework.expression.spel.standard.SpelExpression} and parsing the final value. + * + * @param timeoutValue must not be {@literal null}. + * @param evaluationContext must not be {@literal null}. + * @return never {@literal null} + * @throws IllegalArgumentException for invalid duration values. 
+ */ + private static Duration computeIndexTimeout(String timeoutValue, ValueEvaluationContext evaluationContext) { + return DurationUtil.evaluate(timeoutValue, evaluationContext); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityResultConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityResultConverter.java new file mode 100644 index 0000000000..c04ae9d603 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/EntityResultConverter.java @@ -0,0 +1,33 @@ +/* + * Copyright 2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; + +enum EntityResultConverter implements QueryResultConverter { + + INSTANCE; + + @Override + public Object mapDocument(Document document, ConversionResultSupplier reader) { + return reader.get(); + } + + @Override + public QueryResultConverter andThen(QueryResultConverter after) { + return (QueryResultConverter) after; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java index 421ce3feb1..57813a75ba 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,11 @@ */ package org.springframework.data.mongodb.core; +import java.util.stream.Stream; + import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationResults; -import org.springframework.data.util.CloseableIterator; +import org.springframework.lang.Contract; /** * {@link ExecutableAggregationOperation} allows creation and execution of MongoDB aggregation operations in a fluent @@ -44,7 +46,7 @@ public interface ExecutableAggregationOperation { /** * Start creating an aggregation operation that returns results mapped to the given domain type.
* Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to specify a potentially different - * input type for he aggregation. + * input type for the aggregation. * * @param domainType must not be {@literal null}. * @return new instance of {@link ExecutableAggregation}. @@ -75,10 +77,23 @@ interface AggregationWithCollection { * Trigger execution by calling one of the terminating methods. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.0 */ interface TerminatingAggregation { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingAggregation}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingAggregation map(QueryResultConverter converter); + /** * Apply pipeline operations as specified and get all matching elements. * @@ -88,12 +103,12 @@ interface TerminatingAggregation { /** * Apply pipeline operations as specified and stream all matching elements.
- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.Cursor} + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} * - * @return a {@link CloseableIterator} that wraps the a Mongo DB {@link com.mongodb.Cursor} that needs to be closed. - * Never {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). */ - CloseableIterator stream(); + Stream stream(); } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java index 87ebb41bab..13dc8cd436 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableAggregationOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,12 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; +import java.util.stream.Stream; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationResults; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; -import org.springframework.data.util.CloseableIterator; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -35,88 +31,91 @@ * @author Mark Paluch * @since 2.0 */ -@RequiredArgsConstructor class ExecutableAggregationOperationSupport implements ExecutableAggregationOperation { - private final @NonNull MongoTemplate template; + private final MongoTemplate template; + + ExecutableAggregationOperationSupport(MongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableAggregation aggregateAndReturn(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ExecutableAggregationSupport<>(template, domainType, null, null); + return new ExecutableAggregationSupport<>(template, domainType, QueryResultConverter.entity(), null, null); } /** * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) 
- static class ExecutableAggregationSupport + static class ExecutableAggregationSupport implements AggregationWithAggregation, ExecutableAggregation, TerminatingAggregation { - @NonNull MongoTemplate template; - @NonNull Class domainType; - @Nullable Aggregation aggregation; - @Nullable String collection; + private final MongoTemplate template; + private final Class domainType; + private final QueryResultConverter resultConverter; + private final @Nullable Aggregation aggregation; + private final @Nullable String collection; + + public ExecutableAggregationSupport(MongoTemplate template, Class domainType, + QueryResultConverter resultConverter, @Nullable Aggregation aggregation, + @Nullable String collection) { + this.template = template; + this.domainType = domainType; + this.resultConverter = resultConverter; + this.aggregation = aggregation; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithCollection#inCollection(java.lang.String) - */ @Override public AggregationWithAggregation inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); - return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); + return new ExecutableAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.AggregationWithAggregation#by(org.springframework.data.mongodb.core.aggregation.Aggregation) - */ @Override public TerminatingAggregation by(Aggregation aggregation) { - Assert.notNull(aggregation, "Aggregation must not be null!"); + Assert.notNull(aggregation, "Aggregation must not be null"); + + return new ExecutableAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); + } + + 
@Override + public TerminatingAggregation map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); - return new ExecutableAggregationSupport<>(template, domainType, aggregation, collection); + return new ExecutableAggregationSupport<>(template, domainType, this.resultConverter.andThen(converter), + aggregation, collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#all() - */ @Override public AggregationResults all() { - return template.aggregate(aggregation, getCollectionName(aggregation), domainType); + + Assert.notNull(aggregation, "Aggregation must be set first"); + return template.doAggregate(aggregation, getCollectionName(aggregation), domainType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation.TerminatingAggregation#stream() - */ @Override - public CloseableIterator stream() { - return template.aggregateStream(aggregation, getCollectionName(aggregation), domainType); + public Stream stream() { + + Assert.notNull(aggregation, "Aggregation must be set first"); + return template.doAggregateStream(aggregation, getCollectionName(aggregation), domainType, resultConverter, null); } - private String getCollectionName(Aggregation aggregation) { + private String getCollectionName(@Nullable Aggregation aggregation) { if (StringUtils.hasText(collection)) { return collection; } - if (aggregation instanceof TypedAggregation) { - - TypedAggregation typedAggregation = (TypedAggregation) aggregation; + if (aggregation instanceof TypedAggregation typedAggregation) { if (typedAggregation.getInputType() != null) { return template.getCollectionName(typedAggregation.getInputType()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java 
b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java index d7becb8cb6..43c0d521c3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,11 +19,16 @@ import java.util.Optional; import java.util.stream.Stream; +import org.jspecify.annotations.Nullable; import org.springframework.dao.DataAccessException; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResults; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import com.mongodb.client.MongoCollection; @@ -43,7 +48,7 @@ * query(Human.class) * .inCollection("star-wars") * .as(Jedi.class) - * .matching(query(where("firstname").is("luke"))) + * .matching(where("firstname").is("luke")) * .all(); * * @@ -67,9 +72,33 @@ public interface ExecutableFindOperation { * Trigger find execution by calling one of the terminating methods. 
* * @author Christoph Strobl + * @author Mark Paluch * @since 2.0 */ - interface TerminatingFind { + interface TerminatingFind extends TerminatingResults, TerminatingProjection { + + } + + /** + * Trigger find execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 5.0 + */ + interface TerminatingResults { + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingResults}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingResults map(QueryResultConverter converter); /** * Get exactly zero or one result. @@ -117,13 +146,44 @@ default Optional first() { /** * Stream all matching elements. * - * @return a {@link Stream} that wraps the a Mongo DB {@link com.mongodb.Cursor} that needs to be closed. Never - * {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). */ Stream stream(); /** - * Get the number of matching elements. + * Return a window of elements either starting or resuming at + * {@link org.springframework.data.domain.ScrollPosition}. + *

+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@literal null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a window of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Window scroll(ScrollPosition scrollPosition); + + } + + /** + * Trigger find execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 5.0 + */ + interface TerminatingProjection { + + /** + * Get the number of matching elements.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but + * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications + * needs use {@link MongoOperations#estimatedCount(String)} for empty queries instead. * * @return total number of matching elements. */ @@ -135,16 +195,30 @@ default Optional first() { * @return {@literal true} if at least one matching element exists. */ boolean exists(); + } /** - * Trigger geonear execution by calling one of the terminating methods. + * Trigger {@code geoNear} execution by calling one of the terminating methods. * * @author Christoph Strobl + * @author Mark Paluch * @since 2.0 */ interface TerminatingFindNear { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingFindNear}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingFindNear map(QueryResultConverter converter); + /** * Find all matching elements and return them as {@link org.springframework.data.geo.GeoResult}. * @@ -170,6 +244,18 @@ interface FindWithQuery extends TerminatingFind { */ TerminatingFind matching(Query query); + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingFind}. + * @throws IllegalArgumentException if criteria is {@literal null}. 
+ * @since 3.0 + */ + default TerminatingFind matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + /** * Set the filter query for the geoNear execution. * @@ -291,9 +377,21 @@ interface DistinctWithQuery extends DistinctWithProjection { * * @param query must not be {@literal null}. * @return new instance of {@link TerminatingDistinct}. - * @throws IllegalArgumentException if resultType is {@literal null}. + * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingDistinct matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingDistinct matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java index ccc3ba66aa..46289ecfa4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableFindOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,27 +15,24 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - import java.util.List; -import java.util.Optional; import java.util.stream.Stream; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.GeoResults; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.data.util.CloseableIterator; -import org.springframework.data.util.StreamUtils; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; +import com.mongodb.ReadPreference; import com.mongodb.client.FindIterable; /** @@ -45,23 +42,24 @@ * @author Mark Paluch * @since 2.0 */ -@RequiredArgsConstructor class ExecutableFindOperationSupport implements ExecutableFindOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull MongoTemplate template; + private final MongoTemplate template; + + ExecutableFindOperationSupport(MongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class) - */ @Override + @Contract("_ -> 
new") public ExecutableFind query(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ExecutableFindSupport<>(template, domainType, domainType, null, ALL_QUERY); + return new ExecutableFindSupport<>(template, domainType, domainType, QueryResultConverter.entity(), null, + ALL_QUERY); } /** @@ -69,59 +67,66 @@ public ExecutableFind query(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) - static class ExecutableFindSupport + static class ExecutableFindSupport implements ExecutableFind, FindWithCollection, FindWithProjection, FindWithQuery { - @NonNull MongoTemplate template; - @NonNull Class domainType; - Class returnType; - @Nullable String collection; - Query query; + private final MongoTemplate template; + private final Class domainType; + private final Class returnType; + private final QueryResultConverter resultConverter; + private final @Nullable String collection; + private final Query query; + + ExecutableFindSupport(MongoTemplate template, Class domainType, Class returnType, + QueryResultConverter resultConverter, @Nullable String collection, + Query query) { + this.template = template; + this.domainType = domainType; + this.resultConverter = resultConverter; + this.returnType = returnType; + this.collection = collection; + this.query = query; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithCollection#inCollection(java.lang.String) - */ @Override + @Contract("_ -> new") public FindWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); - return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + return new 
ExecutableFindSupport<>(template, domainType, returnType, resultConverter, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithProjection#as(Class) - */ @Override + @Contract("_ -> new") public FindWithQuery as(Class returnType) { - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(returnType, "ReturnType must not be null"); - return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + return new ExecutableFindSupport<>(template, domainType, returnType, QueryResultConverter.entity(), collection, + query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override + @Contract("_ -> new") public TerminatingFind matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new ExecutableFindSupport<>(template, domainType, returnType, collection, query); + return new ExecutableFindSupport<>(template, domainType, returnType, resultConverter, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#oneValue() - */ @Override - public T oneValue() { + public TerminatingResults map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new ExecutableFindSupport<>(template, domainType, returnType, this.resultConverter.andThen(converter), + collection, query); + } + + @Override + public @Nullable T oneValue() { List result = doFind(new DelegatingQueryCursorPreparer(getCursorPreparer(query, null)).limit(2)); @@ -130,78 +135,56 @@ public T oneValue() { } if (result.size() > 1) { - throw new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1); + throw new 
IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1); } return result.iterator().next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#firstValue() - */ @Override - public T firstValue() { + public @Nullable T firstValue() { List result = doFind(new DelegatingQueryCursorPreparer(getCursorPreparer(query, null)).limit(1)); return ObjectUtils.isEmpty(result) ? null : result.iterator().next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#all() - */ @Override public List all() { return doFind(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#stream() - */ @Override public Stream stream() { - return StreamUtils.createStreamFromIterator(doStream()); + return doStream(); + } + + @Override + public Window scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, resultConverter, + getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery) - */ @Override public TerminatingFindNear near(NearQuery nearQuery) { - return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); + return new TerminatingFindNearSupport<>(nearQuery, this.resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#count() - */ @Override public long count() { return template.count(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingFind#exists() - */ @Override public boolean exists() { return template.exists(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ExecutableFindOperation.FindDistinct#distinct(java.lang.String) - */ @SuppressWarnings("unchecked") @Override public TerminatingDistinct distinct(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new DistinctOperationSupport(this, field); } @@ -211,18 +194,18 @@ private List doFind(@Nullable CursorPreparer preparer) { Document queryObject = query.getQueryObject(); Document fieldsObject = query.getFieldsObject(); - return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType, - getCursorPreparer(query, preparer)); + return template.doFind(template.createDelegate(query), getCollectionName(), queryObject, fieldsObject, domainType, + returnType, resultConverter, getCursorPreparer(query, preparer)); } private List doFindDistinct(String field) { return template.findDistinct(query, field, getCollectionName(), domainType, - returnType == domainType ? (Class) Object.class : returnType); + returnType == domainType ? 
(Class) Object.class : returnType); } - private CloseableIterator doStream() { - return template.doStream(query, domainType, getCollectionName(), returnType); + private Stream doStream() { + return template.doStream(query, domainType, getCollectionName(), returnType, resultConverter); } private CursorPreparer getCursorPreparer(Query query, @Nullable CursorPreparer preparer) { @@ -236,6 +219,31 @@ private String getCollectionName() { private String asString() { return SerializationUtils.serializeToJsonSafely(query); } + + class TerminatingFindNearSupport implements TerminatingFindNear { + + private final NearQuery nearQuery; + private final QueryResultConverter resultConverter; + + public TerminatingFindNearSupport(NearQuery nearQuery, + QueryResultConverter resultConverter) { + this.nearQuery = nearQuery; + this.resultConverter = resultConverter; + } + + @Override + public TerminatingFindNear map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new TerminatingFindNearSupport<>(nearQuery, this.resultConverter.andThen(converter)); + } + + @Override + public GeoResults all() { + return template.doGeoNear(nearQuery, domainType, getCollectionName(), returnType, resultConverter); + } + } } /** @@ -245,77 +253,73 @@ private String asString() { static class DelegatingQueryCursorPreparer implements CursorPreparer { private final @Nullable CursorPreparer delegate; - private Optional limit = Optional.empty(); + private int limit = -1; DelegatingQueryCursorPreparer(@Nullable CursorPreparer delegate) { this.delegate = delegate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.clientFindIterable) - */ @Override - public FindIterable prepare(FindIterable cursor) { + public FindIterable prepare(FindIterable iterable) { - FindIterable target = delegate != null ? 
delegate.prepare(cursor) : cursor; - return limit.map(target::limit).orElse(target); + FindIterable target = delegate != null ? delegate.prepare(iterable) : iterable; + if (limit >= 0) { + target.limit(limit); + } + return target; } + @Contract("_ -> this") CursorPreparer limit(int limit) { - this.limit = Optional.of(limit); + this.limit = limit; return this; } + + @Override + public @Nullable ReadPreference getReadPreference() { + return delegate != null ? delegate.getReadPreference() : null; + } } /** * @author Christoph Strobl * @since 2.1 */ - static class DistinctOperationSupport implements TerminatingDistinct { + static class DistinctOperationSupport implements TerminatingDistinct { private final String field; - private final ExecutableFindSupport delegate; + private final ExecutableFindSupport delegate; - public DistinctOperationSupport(ExecutableFindSupport delegate, String field) { + public DistinctOperationSupport(ExecutableFindSupport delegate, String field) { this.delegate = delegate; this.field = field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithProjection#as(java.lang.Class) - */ @Override - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) + @Contract("_ -> new") public TerminatingDistinct as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.as(resultType), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override + @Contract("_ -> new") public TerminatingDistinct matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new DistinctOperationSupport<>((ExecutableFindSupport) 
delegate.matching(query), field); + return new DistinctOperationSupport<>((ExecutableFindSupport) delegate.matching(query), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation.TerminatingDistinct#all() - */ @Override public List all() { return delegate.doFindDistinct(field); } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java index 4d8672b2ec..c2b08c7e59 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java index 22fd6d08b2..599a910035 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableInsertOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,12 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - import java.util.ArrayList; import java.util.Collection; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -37,19 +33,19 @@ * @author Mark Paluch * @since 2.0 */ -@RequiredArgsConstructor class ExecutableInsertOperationSupport implements ExecutableInsertOperation { - private final @NonNull MongoTemplate template; + private final MongoTemplate template; + + ExecutableInsertOperationSupport(MongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.coreExecutableInsertOperation#insert(java.lan.Class) - */ @Override + @Contract("_ -> new") public ExecutableInsert insert(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableInsertSupport<>(template, domainType, null, null); } @@ -58,72 +54,61 @@ public ExecutableInsert insert(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ExecutableInsertSupport implements ExecutableInsert { - @NonNull 
MongoTemplate template; - @NonNull Class domainType; - @Nullable String collection; - @Nullable BulkMode bulkMode; + private final MongoTemplate template; + private final Class domainType; + private final @Nullable String collection; + private final @Nullable BulkMode bulkMode; + + ExecutableInsertSupport(MongoTemplate template, Class domainType, @Nullable String collection, + @Nullable BulkMode bulkMode) { + + this.template = template; + this.domainType = domainType; + this.collection = collection; + this.bulkMode = bulkMode; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#insert(java.lang.Class) - */ @Override public T one(T object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); return template.insert(object, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingInsert#all(java.util.Collection) - */ @Override public Collection all(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); return template.insert(objects, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.TerminatingBulkInsert#bulk(java.util.Collection) - */ @Override public BulkWriteResult bulk(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); return template.bulkOps(bulkMode != null ? 
bulkMode : BulkMode.ORDERED, domainType, getCollectionName()) .insert(new ArrayList<>(objects)).execute(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithCollection#inCollection(java.lang.String) - */ @Override + @Contract("_ -> new") public InsertWithBulkMode inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty."); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation.InsertWithBulkMode#withBulkMode(org.springframework.data.mongodb.core.BulkMode) - */ @Override + @Contract("_ -> new") public TerminatingBulkInsert withBulkMode(BulkMode bulkMode) { - Assert.notNull(bulkMode, "BulkMode must not be null!"); + Assert.notNull(bulkMode, "BulkMode must not be null"); return new ExecutableInsertSupport<>(template, domainType, collection, bulkMode); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java index 508024e2e6..2d13ad3ea0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; /** @@ -30,7 +31,7 @@ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the * collection name for the execution. - * + * *
  *     
  *         mapReduce(Human.class)
@@ -44,6 +45,7 @@
  * 
* * @author Christoph Strobl + * @author Mark Paluch * @since 2.1 */ public interface ExecutableMapReduceOperation { @@ -146,6 +148,18 @@ interface MapReduceWithQuery extends TerminatingMapReduce { * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingMapReduce matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingMapReduce}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingMapReduce matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** @@ -173,7 +187,9 @@ interface MapReduceWithProjection extends MapReduceWithQuery { * * @author Christoph Strobl * @since 2.1 + * @deprecated since 4.0 in favor of {@link org.springframework.data.mongodb.core.aggregation}. */ + @Deprecated interface MapReduceWithOptions { /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java index c4bdaf4254..55864cbd8e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableMapReduceOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,12 @@ */ package org.springframework.data.mongodb.core; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - import java.util.List; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -32,21 +30,27 @@ * @author Christoph Strobl * @since 2.1 */ -@RequiredArgsConstructor class ExecutableMapReduceOperationSupport implements ExecutableMapReduceOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull MongoTemplate template; + private final MongoTemplate template; + + ExecutableMapReduceOperationSupport(MongoTemplate template) { + + Assert.notNull(template, "Template must not be null"); + this.template = template; + } /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation#mapReduce(java.lang.Class) */ @Override + @Contract("_ -> new") public ExecutableMapReduceSupport mapReduce(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ExecutableMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null); } @@ -87,6 +91,7 @@ static class ExecutableMapReduceSupport * @see org.springframework.data.mongodb.core.ExecutableMapReduceOperation.TerminatingMapReduce#all() */ @Override + @SuppressWarnings("NullAway") public List all() { return template.mapReduce(query, domainType, 
getCollectionName(), mapFunction, reduceFunction, options, returnType); @@ -99,7 +104,7 @@ public List all() { @Override public MapReduceWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -112,7 +117,7 @@ public MapReduceWithProjection inCollection(String collection) { @Override public TerminatingMapReduce matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -125,7 +130,7 @@ public TerminatingMapReduce matching(Query query) { @Override public MapReduceWithQuery as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ExecutableMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction, reduceFunction, options); @@ -138,7 +143,7 @@ public MapReduceWithQuery as(Class resultType) { @Override public ExecutableMapReduce with(MapReduceOptions options) { - Assert.notNull(options, "Options must not be null! 
Please consider empty MapReduceOptions#options() instead."); + Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -151,7 +156,7 @@ public ExecutableMapReduce with(MapReduceOptions options) { @Override public MapReduceWithReduceFunction map(String mapFunction) { - Assert.hasText(mapFunction, "MapFunction name must not be null nor empty!"); + Assert.hasText(mapFunction, "MapFunction name must not be null nor empty"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); @@ -164,7 +169,7 @@ public MapReduceWithReduceFunction map(String mapFunction) { @Override public ExecutableMapReduce reduce(String reduceFunction) { - Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty!"); + Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty"); return new ExecutableMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction, reduceFunction, options); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java index 2bd72e8534..c29a448f1c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,7 +17,9 @@ import java.util.List; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Contract; import com.mongodb.client.result.DeleteResult; @@ -53,11 +55,40 @@ public interface ExecutableRemoveOperation { */ ExecutableRemove remove(Class domainType); + /** + * @author Christoph Strobl + * @since 5.0 + */ + interface TerminatingResults { + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link ExecutableFindOperation.TerminatingResults}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingResults map(QueryResultConverter converter); + + /** + * Remove and return all matching documents.
+ * NOTE: The entire list of documents will be fetched before sending the actual delete commands. + * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete + * operation. + * + * @return empty {@link List} if no match found. Never {@literal null}. + */ + List findAndRemove(); + } + /** * @author Christoph Strobl * @since 2.0 */ - interface TerminatingRemove { + interface TerminatingRemove extends TerminatingResults { /** * Remove all documents matching. @@ -72,16 +103,6 @@ interface TerminatingRemove { * @return the {@link DeleteResult}. Never {@literal null}. */ DeleteResult one(); - - /** - * Remove and return all matching documents.
- * NOTE The entire list of documents will be fetched before sending the actual delete commands. - * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete - * operation. - * - * @return empty {@link List} if no match found. Never {@literal null}. - */ - List findAndRemove(); } /** @@ -104,7 +125,6 @@ interface RemoveWithCollection extends RemoveWithQuery { RemoveWithQuery inCollection(String collection); } - /** * @author Christoph Strobl * @since 2.0 @@ -119,6 +139,18 @@ interface RemoveWithQuery extends TerminatingRemove { * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingRemove matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingRemove matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java index 7cc376680f..7817a7c8af 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableRemoveOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,15 +15,11 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - import java.util.List; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -36,90 +32,86 @@ * @author Mark Paluch * @since 2.0 */ -@RequiredArgsConstructor class ExecutableRemoveOperationSupport implements ExecutableRemoveOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull MongoTemplate tempate; + private final MongoTemplate tempate; + + public ExecutableRemoveOperationSupport(MongoTemplate tempate) { + this.tempate = tempate; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class) - */ @Override + @Contract("_ -> new") public ExecutableRemove remove(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ExecutableRemoveSupport<>(tempate, domainType, ALL_QUERY, null); + return new ExecutableRemoveSupport<>(tempate, domainType, ALL_QUERY, null, QueryResultConverter.entity()); } /** * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) - static class ExecutableRemoveSupport implements ExecutableRemove, RemoveWithCollection { - - @NonNull MongoTemplate template; - @NonNull Class domainType; - 
Query query; - @Nullable String collection; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithCollection#inCollection(java.lang.String) - */ + static class ExecutableRemoveSupport implements ExecutableRemove, RemoveWithCollection { + + private final MongoTemplate template; + private final Class domainType; + private final Query query; + @Nullable private final String collection; + private final QueryResultConverter resultConverter; + + public ExecutableRemoveSupport(MongoTemplate template, Class domainType, Query query, + @Nullable String collection, QueryResultConverter resultConverter) { + this.template = template; + this.domainType = domainType; + this.query = query; + this.collection = collection; + this.resultConverter = resultConverter; + } + @Override + @Contract("_ -> new") public RemoveWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); - return new ExecutableRemoveSupport<>(template, domainType, query, collection); + return new ExecutableRemoveSupport<>(template, domainType, query, collection, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.RemoveWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override + @Contract("_ -> new") public TerminatingRemove matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new ExecutableRemoveSupport<>(template, domainType, query, collection); + return new ExecutableRemoveSupport<>(template, domainType, query, collection, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#all() - */ @Override public DeleteResult all() { return template.doRemove(getCollectionName(), query, 
domainType, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#one() - */ @Override public DeleteResult one() { return template.doRemove(getCollectionName(), query, domainType, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation.TerminatingRemove#findAndRemove() - */ @Override public List findAndRemove() { String collectionName = getCollectionName(); - return template.doFindAndDelete(collectionName, query, domainType); + return template.doFindAndDelete(collectionName, query, domainType, resultConverter); + } + + @Override + @SuppressWarnings({"unchecked", "rawtypes"}) + public TerminatingResults map(QueryResultConverter converter) { + return new ExecutableRemoveSupport<>(template, (Class) domainType, query, collection, converter); } private String getCollectionName() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java index 0c0d44fcbf..e671b7b7ce 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,9 +17,14 @@ import java.util.Optional; +import org.jspecify.annotations.Nullable; + +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Contract; import com.mongodb.client.result.UpdateResult; @@ -66,6 +71,18 @@ public interface ExecutableUpdateOperation { */ interface TerminatingFindAndModify { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingFindAndModify}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingFindAndModify map(QueryResultConverter converter); + /** * Find, modify and return the first matching document. * @@ -84,15 +101,32 @@ default Optional findAndModify() { T findAndModifyValue(); } + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + UpdateResult replaceFirst(); + } + /** * Trigger - * findOneAndReplace + * findOneAndReplace * execution by calling one of the terminating methods. 
* * @author Mark Paluch * @since 2.1 */ - interface TerminatingFindAndReplace { + interface TerminatingFindAndReplace extends TerminatingReplace { /** * Find, replace and return the first matching document. @@ -110,6 +144,19 @@ default Optional findAndReplace() { */ @Nullable T findAndReplaceValue(); + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingFindAndModify}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingFindAndReplace map(QueryResultConverter converter); + } /** @@ -151,13 +198,16 @@ interface TerminatingUpdate extends TerminatingFindAndModify, FindAndModif interface UpdateWithUpdate { /** - * Set the {@link Update} to be applied. + * Set the {@link UpdateDefinition} to be applied. * * @param update must not be {@literal null}. * @return new instance of {@link TerminatingUpdate}. * @throws IllegalArgumentException if update is {@literal null}. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - TerminatingUpdate apply(Update update); + TerminatingUpdate apply(UpdateDefinition update); /** * Specify {@code replacement} object. @@ -205,6 +255,18 @@ interface UpdateWithQuery extends UpdateWithUpdate { * @throws IllegalArgumentException if query is {@literal null}. */ UpdateWithUpdate matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link UpdateWithUpdate}. + * @throws IllegalArgumentException if query is {@literal null}. 
+ * @since 3.0 + */ + default UpdateWithUpdate matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** @@ -225,6 +287,22 @@ interface FindAndModifyWithOptions { TerminatingFindAndModify withOptions(FindAndModifyOptions options); } + /** + * @author Christoph Strobl + * @since 4.2 + */ + interface ReplaceWithOptions extends TerminatingReplace { + + /** + * Explicitly define {@link ReplaceOptions}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingReplace withOptions(ReplaceOptions options); + } + /** * Define {@link FindAndReplaceOptions}. * @@ -232,7 +310,7 @@ interface FindAndModifyWithOptions { * @author Christoph Strobl * @since 2.1 */ - interface FindAndReplaceWithOptions extends TerminatingFindAndReplace { + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { /** * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java index a07969d16e..dc9ce5cacc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ExecutableUpdateOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,10 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; - +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.Update; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -35,187 +31,197 @@ * @author Mark Paluch * @since 2.0 */ -@RequiredArgsConstructor class ExecutableUpdateOperationSupport implements ExecutableUpdateOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull MongoTemplate template; + private final MongoTemplate template; + + ExecutableUpdateOperationSupport(MongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class) - */ @Override + @Contract("_ -> new") public ExecutableUpdate update(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); + return new ExecutableUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType, QueryResultConverter.entity()); } /** * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = 
AccessLevel.PRIVATE, makeFinal = true) - static class ExecutableUpdateSupport + @SuppressWarnings("rawtypes") + static class ExecutableUpdateSupport implements ExecutableUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate, FindAndReplaceWithOptions, TerminatingFindAndReplace, FindAndReplaceWithProjection { - @NonNull MongoTemplate template; - @NonNull Class domainType; - Query query; - @Nullable Update update; - @Nullable String collection; - @Nullable FindAndModifyOptions findAndModifyOptions; - @Nullable FindAndReplaceOptions findAndReplaceOptions; - @Nullable Object replacement; - @NonNull Class targetType; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#apply(Update) - */ + private final MongoTemplate template; + private final Class domainType; + private final Query query; + @Nullable private final UpdateDefinition update; + @Nullable private final String collection; + @Nullable private final FindAndModifyOptions findAndModifyOptions; + @Nullable private final FindAndReplaceOptions findAndReplaceOptions; + @Nullable private final Object replacement; + private final QueryResultConverter resultConverter; + private final Class targetType; + + ExecutableUpdateSupport(MongoTemplate template, Class domainType, Query query, @Nullable UpdateDefinition update, + @Nullable String collection, @Nullable FindAndModifyOptions findAndModifyOptions, + @Nullable FindAndReplaceOptions findAndReplaceOptions, @Nullable Object replacement, Class targetType, + QueryResultConverter resultConverter) { + + this.template = template; + this.domainType = domainType; + this.query = query; + this.update = update; + this.collection = collection; + this.findAndModifyOptions = findAndModifyOptions; + this.findAndReplaceOptions = findAndReplaceOptions; + this.replacement = replacement; + this.targetType = targetType; + this.resultConverter = resultConverter; + } + @Override - public TerminatingUpdate apply(Update 
update) { + @Contract("_ -> new") + public TerminatingUpdate apply(UpdateDefinition update) { - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(update, "Update must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithCollection#inCollection(java.lang.String) - */ @Override + @Contract("_ -> new") public UpdateWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndModifyWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndModifyOptions) - */ @Override + @Contract("_ -> new") public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, options, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.UpdateWithUpdate#replaceWith(Object) - */ @Override + @Contract("_ -> new") public FindAndReplaceWithProjection replaceWith(T replacement) { - Assert.notNull(replacement, "Replacement must not be null!"); + 
Assert.notNull(replacement, "Replacement must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions) - */ @Override + @Contract("_ -> new") public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); + + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + options, replacement, targetType, resultConverter); + } + + @Override + @Contract("_ -> new") + public TerminatingReplace withOptions(ReplaceOptions options) { + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if (options.isUpsert()) { + target.upsert(); + } return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - options, replacement, targetType); + target, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override + @Contract("_ -> new") public UpdateWithUpdate matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithProjection#as(java.lang.Class) - */ @Override + @Contract("_ -> new") public FindAndReplaceWithOptions as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, resultType); + findAndReplaceOptions, replacement, resultType, QueryResultConverter.entity()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#all() - */ @Override public UpdateResult all() { return doUpdate(true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#first() - */ @Override public UpdateResult first() { return doUpdate(false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingUpdate#upsert() - */ @Override public UpdateResult upsert() { return doUpdate(true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndModify#findAndModifyValue() - */ @Override + public ExecutableUpdateSupport map(QueryResultConverter converter) { + return new ExecutableUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType, this.resultConverter.andThen(converter)); + } + + @Override + @SuppressWarnings("NullAway") public @Nullable T findAndModifyValue() { return template.findAndModify(query, update, findAndModifyOptions != null ? 
findAndModifyOptions : new FindAndModifyOptions(), targetType, - getCollectionName()); + getCollectionName(), resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation.TerminatingFindAndReplace#findAndReplaceValue() - */ @Override + @SuppressWarnings({ "unchecked", "NullAway" }) public @Nullable T findAndReplaceValue() { return (T) template.findAndReplace(query, replacement, - findAndReplaceOptions != null ? findAndReplaceOptions : FindAndReplaceOptions.empty(), domainType, - getCollectionName(), targetType); + findAndReplaceOptions != null ? findAndReplaceOptions : FindAndReplaceOptions.empty(), (Class) domainType, + getCollectionName(), targetType, (QueryResultConverter) resultConverter); + } + + @Override + @SuppressWarnings({ "unchecked", "NullAway" }) + public UpdateResult replaceFirst() { + + if (replacement != null) { + return template.replace(query, domainType, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); } + @SuppressWarnings("NullAway") private UpdateResult doUpdate(boolean multi, boolean upsert) { return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java index 3d4d3ce7ff..6e9b775324 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndModifyOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,8 +17,9 @@ import java.util.Optional; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; /** * @author Mark Pollak @@ -33,6 +34,31 @@ public class FindAndModifyOptions { private @Nullable Collation collation; + private static final FindAndModifyOptions NONE = new FindAndModifyOptions() { + + private static final String ERROR_MSG = "FindAndModifyOptions.none() cannot be changed; Please use FindAndModifyOptions.options() instead"; + + @Override + public FindAndModifyOptions returnNew(boolean returnNew) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndModifyOptions upsert(boolean upsert) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndModifyOptions remove(boolean remove) { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndModifyOptions collation(@Nullable Collation collation) { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; + /** * Static factory method to create a FindAndModifyOptions instance * @@ -43,8 +69,18 @@ public static FindAndModifyOptions options() { } /** - * Create new {@link FindAndModifyOptions} based on option of given {@litearl source}. - * + * Static factory method returning an unmodifiable {@link FindAndModifyOptions} instance. + * + * @return unmodifiable {@link FindAndModifyOptions} instance. 
+ * @since 2.2 + */ + public static FindAndModifyOptions none() { + return NONE; + } + + /** + * Create new {@link FindAndModifyOptions} based on option of given {@literal source}. + * * @param source can be {@literal null}. * @return new instance of {@link FindAndModifyOptions}. * @since 2.0 @@ -64,16 +100,19 @@ public static FindAndModifyOptions of(@Nullable FindAndModifyOptions source) { return options; } + @Contract("_ -> this") public FindAndModifyOptions returnNew(boolean returnNew) { this.returnNew = returnNew; return this; } + @Contract("_ -> this") public FindAndModifyOptions upsert(boolean upsert) { this.upsert = upsert; return this; } + @Contract("_ -> this") public FindAndModifyOptions remove(boolean remove) { this.remove = remove; return this; @@ -82,10 +121,11 @@ public FindAndModifyOptions remove(boolean remove) { /** * Define the {@link Collation} specifying language-specific rules for string comparison. * - * @param collation - * @return + * @param collation can be {@literal null}. + * @return this. * @since 2.0 */ + @Contract("_ -> this") public FindAndModifyOptions collation(@Nullable Collation collation) { this.collation = collation; @@ -107,7 +147,7 @@ public boolean isRemove() { /** * Get the {@link Collation} specifying language-specific rules for string comparison. * - * @return + * @return never {@literal null}. * @since 2.0 */ public Optional getCollation() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java index c4c6f52636..2005ba3c6c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindAndReplaceOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,9 +15,11 @@ */ package org.springframework.data.mongodb.core; +import org.springframework.lang.Contract; + /** * Options for - * findOneAndReplace. + * findOneAndReplace. *
* Defaults to *
@@ -31,10 +33,24 @@ * @author Christoph Strobl * @since 2.1 */ -public class FindAndReplaceOptions { +public class FindAndReplaceOptions extends ReplaceOptions { private boolean returnNew; - private boolean upsert; + + private static final FindAndReplaceOptions NONE = new FindAndReplaceOptions() { + + private static final String ERROR_MSG = "FindAndReplaceOptions.none() cannot be changed; Please use FindAndReplaceOptions.options() instead"; + + @Override + public FindAndReplaceOptions returnNew() { + throw new UnsupportedOperationException(ERROR_MSG); + } + + @Override + public FindAndReplaceOptions upsert() { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; /** * Static factory method to create a {@link FindAndReplaceOptions} instance. @@ -51,6 +67,16 @@ public static FindAndReplaceOptions options() { return new FindAndReplaceOptions(); } + /** + * Static factory method returning an unmodifiable {@link FindAndReplaceOptions} instance. + * + * @return unmodifiable {@link FindAndReplaceOptions} instance. + * @since 2.2 + */ + public static FindAndReplaceOptions none() { + return NONE; + } + /** * Static factory method to create a {@link FindAndReplaceOptions} instance with *
@@ -71,6 +97,7 @@ public static FindAndReplaceOptions empty() { * * @return this. */ + @Contract("-> this") public FindAndReplaceOptions returnNew() { this.returnNew = true; @@ -82,28 +109,20 @@ public FindAndReplaceOptions returnNew() { * * @return this. */ + @Contract("-> this") public FindAndReplaceOptions upsert() { - this.upsert = true; + super.upsert(); return this; } /** * Get the bit indicating to return the replacement document. * - * @return + * @return {@literal true} if set. */ public boolean isReturnNew() { return returnNew; } - /** - * Get the bit indicating if to create a new document if not exists. - * - * @return - */ - public boolean isUpsert() { - return upsert; - } - } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java index edb902dea7..f04417325c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FindPublisherPreparer.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,19 +15,68 @@ */ package org.springframework.data.mongodb.core; +import java.util.function.Function; + +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; + +import com.mongodb.ReadPreference; import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MongoCollection; /** * Simple callback interface to allow customization of a {@link FindPublisher}. * * @author Mark Paluch + * @author Christoph Strobl + * @author Konstantin Volivach */ -interface FindPublisherPreparer { +public interface FindPublisherPreparer extends ReadPreferenceAware { + + /** + * Default {@link FindPublisherPreparer} just passing on the given {@link FindPublisher}. + * + * @since 2.2 + */ + FindPublisherPreparer NO_OP_PREPARER = (findPublisher -> findPublisher); /** * Prepare the given cursor (apply limits, skips and so on). Returns the prepared cursor. * * @param findPublisher must not be {@literal null}. */ - FindPublisher prepare(FindPublisher findPublisher); + FindPublisher prepare(FindPublisher findPublisher); + + /** + * Apply query specific settings to {@link MongoCollection} and initiate a find operation returning a + * {@link FindPublisher} via the given {@link Function find} function. + * + * @param collection must not be {@literal null}. + * @param find must not be {@literal null}. + * @return never {@literal null}. + * @throws IllegalArgumentException if one of the required arguments is {@literal null}. 
+ * @since 2.2 + */ + default FindPublisher initiateFind(MongoCollection collection, + Function, FindPublisher> find) { + + Assert.notNull(collection, "Collection must not be null"); + Assert.notNull(find, "Find function must not be null"); + + if (hasReadPreference()) { + collection = collection.withReadPreference(getReadPreference()); + } + + return prepare(find.apply(collection)); + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none defined. + * @since 2.2 + */ + @Override + default @Nullable ReadPreference getReadPreference() { + return null; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java index ff2812020c..906afddd4a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/FluentMongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java index 62a37a0b14..654e7d4330 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/GeoCommandStatistics.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -39,7 +39,7 @@ class GeoCommandStatistics { */ private GeoCommandStatistics(Document source) { - Assert.notNull(source, "Source document must not be null!"); + Assert.notNull(source, "Source document must not be null"); this.source = source; } @@ -47,11 +47,11 @@ private GeoCommandStatistics(Document source) { * Creates a new {@link GeoCommandStatistics} from the given command result extracting the statistics. * * @param commandResult must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ public static GeoCommandStatistics from(Document commandResult) { - Assert.notNull(commandResult, "Command result must not be null!"); + Assert.notNull(commandResult, "Command result must not be null"); Object stats = commandResult.get("stats"); return stats == null ? NONE : new GeoCommandStatistics((Document) stats); @@ -61,7 +61,7 @@ public static GeoCommandStatistics from(Document commandResult) { * Returns the average distance reported by the command result. Mitigating a removal of the field in case the command * didn't return any result introduced in MongoDB 3.2 RC1. * - * @return + * @return never {@literal null}, uses {@link Double#NaN} if {@literal avgDistance} does not exist. * @see MongoDB Jira SERVER-21024 */ public double getAverageDistance() { diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java new file mode 100644 index 0000000000..043613122a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/HintFunction.java @@ -0,0 +1,129 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.function.Function; + +import org.bson.conversions.Bson; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.util.StringUtils; + +/** + * Function object to apply a query hint. Can be an index name or a BSON document. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class HintFunction { + + private static final HintFunction EMPTY = new HintFunction(null); + + private final @Nullable Object hint; + + private HintFunction(@Nullable Object hint) { + this.hint = hint; + } + + /** + * Return an empty hint function. + * + * @return + */ + static HintFunction empty() { + return EMPTY; + } + + /** + * Create a {@link HintFunction} from a {@link Bson document} or {@link String index name}. + * + * @param hint + * @return + */ + static HintFunction from(@Nullable Object hint) { + return new HintFunction(hint); + } + + /** + * Return whether a hint is present. + * + * @return + */ + public boolean isPresent() { + return (hint instanceof String hintString && StringUtils.hasText(hintString)) || hint instanceof Bson; + } + + /** + * If a hint is not present, returns {@code true}, otherwise {@code false}. + * + * @return {@code true} if a hint is not present, otherwise {@code false}. + */ + public boolean isEmpty() { + return !isPresent(); + } + + /** + * Apply the hint to consumers depending on the hint format if {@link #isPresent() present}. + * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @param + */ + public void ifPresent(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + return; + } + apply(registryProvider, stringConsumer, bsonConsumer); + } + + /** + * Apply the hint to consumers depending on the hint format. 
+ * + * @param registryProvider + * @param stringConsumer + * @param bsonConsumer + * @return + * @param + */ + public R apply(@Nullable CodecRegistryProvider registryProvider, Function stringConsumer, + Function bsonConsumer) { + + if (isEmpty()) { + throw new IllegalStateException("No hint present"); + } + + if (hint instanceof Bson bson) { + return bsonConsumer.apply(bson); + } + + if (hint instanceof String hintString) { + + if (BsonUtils.isJsonDocument(hintString)) { + return bsonConsumer.apply(BsonUtils.parse(hintString, registryProvider)); + } + return stringConsumer.apply(hintString); + } + + throw new IllegalStateException( + "Unable to read hint of type %s".formatted(hint != null ? hint.getClass() : "null")); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java index 343d2527de..9f9295bba3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/IndexConverters.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,10 +18,10 @@ import java.util.concurrent.TimeUnit; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.core.convert.converter.Converter; import org.springframework.data.mongodb.core.index.IndexDefinition; import org.springframework.data.mongodb.core.index.IndexInfo; -import org.springframework.lang.Nullable; import org.springframework.util.ObjectUtils; import com.mongodb.client.model.Collation; @@ -88,9 +88,6 @@ private static Converter getIndexDefinitionIndexO if (indexOptions.containsKey("bits")) { ops = ops.bits((Integer) indexOptions.get("bits")); } - if (indexOptions.containsKey("bucketSize")) { - ops = ops.bucketSize(((Number) indexOptions.get("bucketSize")).doubleValue()); - } if (indexOptions.containsKey("default_language")) { ops = ops.defaultLanguage(indexOptions.get("default_language").toString()); } @@ -115,12 +112,19 @@ private static Converter getIndexDefinitionIndexO ops = ops.collation(fromDocument(indexOptions.get("collation", Document.class))); } + if (indexOptions.containsKey("wildcardProjection")) { + ops.wildcardProjection(indexOptions.get("wildcardProjection", Document.class)); + } + + if (indexOptions.containsKey("hidden")) { + ops = ops.hidden((Boolean) indexOptions.get("hidden")); + } + return ops; }; } - @Nullable - public static Collation fromDocument(@Nullable Document source) { + public static @Nullable Collation fromDocument(@Nullable Document source) { if (source == null) { return null; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java index bccaa50ee5..cd9ba90453 
100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappedDocument.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,33 +15,38 @@ */ package org.springframework.data.mongodb.core; -import lombok.Getter; -import lombok.RequiredArgsConstructor; - import java.util.Collection; import java.util.List; import org.bson.Document; import org.bson.conversions.Bson; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.data.util.StreamUtils; -import com.mongodb.client.model.Filters; - /** * A MongoDB document in its mapped state. I.e. after a source document has been mapped using mapping information of the * entity the source document was supposed to represent. 
- * + * * @author Oliver Gierke * @since 2.1 */ -@RequiredArgsConstructor(staticName = "of") public class MappedDocument { - private static final String ID_FIELD = "_id"; + private static final String ID_FIELD = FieldName.ID.name(); private static final Document ID_ONLY_PROJECTION = new Document(ID_FIELD, 1); - private final @Getter Document document; + private final Document document; + + private MappedDocument(Document document) { + this.document = document; + } + + public static MappedDocument of(Document document) { + return new MappedDocument(document); + } public static Document getIdOnlyProjection() { return ID_ONLY_PROJECTION; @@ -66,7 +71,7 @@ public boolean hasNonNullId() { return hasId() && document.get(ID_FIELD) != null; } - public Object getId() { + public @Nullable Object getId() { return document.get(ID_FIELD); } @@ -79,10 +84,74 @@ public boolean isIdPresent(Class type) { } public Bson getIdFilter() { - return Filters.eq(ID_FIELD, document.get(ID_FIELD)); + return new Document(ID_FIELD, document.get(ID_FIELD)); + } + + public @Nullable Object get(String key) { + return document.get(key); + } + + public UpdateDefinition updateWithoutId() { + return new MappedUpdate(Update.fromDocument(document, ID_FIELD)); + } + + public Document getDocument() { + return this.document; + } + + /** + * Updates the documents {@link #ID_FIELD}. + * + * @param value the {@literal _id} value to set. + * @since 3.4.3 + */ + public void updateId(Object value) { + document.put(ID_FIELD, value); } - public Update updateWithoutId() { - return Update.fromDocument(document, ID_FIELD); + /** + * An {@link UpdateDefinition} that indicates that the {@link #getUpdateObject() update object} has already been + * mapped to the specific domain type. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ + static class MappedUpdate implements UpdateDefinition { + + private final Update delegate; + + MappedUpdate(Update delegate) { + this.delegate = delegate; + } + + @Override + public Document getUpdateObject() { + return delegate.getUpdateObject(); + } + + @Override + public boolean modifies(String key) { + return delegate.modifies(key); + } + + @Override + public void inc(String version) { + delegate.inc(version); + } + + @Override + public Boolean isIsolated() { + return delegate.isIsolated(); + } + + @Override + public List getArrayFilters() { + return delegate.getArrayFilters(); + } + + @Override + public boolean hasArrayFilters() { + return delegate.hasArrayFilters(); + } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java new file mode 100644 index 0000000000..396ae1ce8a --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MappingMongoJsonSchemaCreator.java @@ -0,0 +1,459 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.EnumSet; +import java.util.List; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.Field; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.Queryable; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ArrayJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.EncryptedJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.ObjectJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.IdentifiableJsonSchemaProperty.QueryableJsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema.MongoJsonSchemaBuilder; +import org.springframework.data.mongodb.core.schema.QueryCharacteristic; +import org.springframework.data.mongodb.core.schema.QueryCharacteristics; +import org.springframework.data.mongodb.core.schema.TypedJsonSchemaObject; +import org.springframework.data.util.TypeInformation; 
+import org.springframework.lang.Contract; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; +import org.springframework.util.CollectionUtils; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; + +/** + * {@link MongoJsonSchemaCreator} implementation using both {@link MongoConverter} and {@link MappingContext} to obtain + * domain type meta information which considers {@link org.springframework.data.mongodb.core.mapping.Field field names} + * and {@link org.springframework.data.mongodb.core.convert.MongoCustomConversions custom conversions}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + */ +class MappingMongoJsonSchemaCreator implements MongoJsonSchemaCreator { + + private final MongoConverter converter; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final Predicate filter; + private final LinkedMultiValueMap> mergeProperties; + + /** + * Create a new instance of {@link MappingMongoJsonSchemaCreator}. + * + * @param converter must not be {@literal null}. 
+ */ + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter) { + + this(converter, (MappingContext, MongoPersistentProperty>) converter.getMappingContext(), + (property) -> true, new LinkedMultiValueMap<>()); + } + + @SuppressWarnings("unchecked") + MappingMongoJsonSchemaCreator(MongoConverter converter, + MappingContext, MongoPersistentProperty> mappingContext, + Predicate filter, LinkedMultiValueMap> mergeProperties) { + + Assert.notNull(converter, "Converter must not be null"); + this.converter = converter; + this.mappingContext = mappingContext; + this.filter = filter; + this.mergeProperties = mergeProperties; + } + + @Override + @Contract("_ -> new") + public MongoJsonSchemaCreator filter(Predicate filter) { + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, mergeProperties); + } + + @Override + public PropertySpecifier property(String path) { + return types -> withTypesFor(path, types); + } + + /** + * Specify additional types to be considered when rendering the schema for the given path. + * + * @param path path the path using {@literal dot '.'} notation. + * @param types must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.4 + */ + @Contract("_, _ -> new") + public MongoJsonSchemaCreator withTypesFor(String path, Class... 
types) { + + LinkedMultiValueMap> clone = mergeProperties.clone(); + for (Class type : types) { + clone.add(path, type); + } + return new MappingMongoJsonSchemaCreator(converter, mappingContext, filter, clone); + } + + @Override + public MongoJsonSchema createSchemaFor(Class type) { + + MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(type); + MongoJsonSchemaBuilder schemaBuilder = MongoJsonSchema.builder(); + + Encrypted encrypted = entity.findAnnotation(Encrypted.class); + if (encrypted != null) { + schemaBuilder.encryptionMetadata(getEncryptionMetadata(entity, encrypted)); + } + + List schemaProperties = computePropertiesForEntity(Collections.emptyList(), entity); + schemaBuilder.properties(schemaProperties.toArray(new JsonSchemaProperty[0])); + + return schemaBuilder.build(); + } + + private static Document getEncryptionMetadata(MongoPersistentEntity entity, Encrypted encrypted) { + + Document encryptionMetadata = new Document(); + + Collection encryptionKeyIds = entity.getEncryptionKeyIds(); + if (!CollectionUtils.isEmpty(encryptionKeyIds)) { + encryptionMetadata.append("keyId", encryptionKeyIds); + } + + if (StringUtils.hasText(encrypted.algorithm())) { + encryptionMetadata.append("algorithm", encrypted.algorithm()); + } + + return encryptionMetadata; + } + + private List computePropertiesForEntity(List path, + MongoPersistentEntity entity) { + + List schemaProperties = new ArrayList<>(); + + for (MongoPersistentProperty nested : entity) { + + List currentPath = new ArrayList<>(path); + + String stringPath = currentPath.stream().map(PersistentProperty::getName).collect(Collectors.joining(".")); + stringPath = StringUtils.hasText(stringPath) ? (stringPath + "." 
+ nested.getName()) : nested.getName(); + if (!filter.test(new PropertyContext(stringPath, nested))) { + if (!mergeProperties.containsKey(stringPath)) { + continue; + } + } + + if (path.contains(nested)) { // cycle guard + schemaProperties.add(createSchemaProperty(computePropertyFieldName(CollectionUtils.lastElement(currentPath)), + Object.class, false)); + break; + } + + currentPath.add(nested); + schemaProperties.add(computeSchemaForProperty(currentPath)); + } + + return schemaProperties; + } + + @SuppressWarnings("NullAway") + private JsonSchemaProperty computeSchemaForProperty(List path) { + + String stringPath = path.stream().map(MongoPersistentProperty::getName).collect(Collectors.joining(".")); + MongoPersistentProperty property = CollectionUtils.lastElement(path); + + boolean required = isRequiredProperty(property); + Class rawTargetType = computeTargetType(property); // target type before conversion + Class targetType = converter.getTypeMapper().getWriteTargetTypeFor(rawTargetType); // conversion target type + + if ((rawTargetType.isPrimitive() || ClassUtils.isPrimitiveArray(rawTargetType)) && targetType == Object.class + || ClassUtils.isAssignable(targetType, rawTargetType)) { + targetType = rawTargetType; + } + + if (!isCollection(property) && ObjectUtils.nullSafeEquals(rawTargetType, targetType)) { + if (property.isEntity() || mergeProperties.containsKey(stringPath)) { + List targetProperties = new ArrayList<>(); + + if (property.isEntity()) { + targetProperties.add(createObjectSchemaPropertyForEntity(path, property, required)); + } + if (mergeProperties.containsKey(stringPath)) { + for (Class theType : mergeProperties.get(stringPath)) { + + ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + mappingContext.getRequiredPersistentEntity(theType)); + + targetProperties.add(createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new 
JsonSchemaProperty[0])), required)); + } + } + JsonSchemaProperty schemaProperty = targetProperties.size() == 1 ? targetProperties.iterator().next() + : JsonSchemaProperty.merged(targetProperties); + return applyEncryptionDataIfNecessary(property, schemaProperty); + } + } + + String fieldName = computePropertyFieldName(property); + + JsonSchemaProperty schemaProperty; + if (isCollection(property)) { + schemaProperty = createArraySchemaProperty(fieldName, property, required); + } else if (property.isMap()) { + schemaProperty = createSchemaProperty(fieldName, Type.objectType(), required); + } else if (ClassUtils.isAssignable(Enum.class, targetType)) { + schemaProperty = createEnumSchemaProperty(fieldName, targetType, required); + } else { + schemaProperty = createSchemaProperty(fieldName, targetType, required); + } + + return applyEncryptionDataIfNecessary(property, schemaProperty); + } + + private JsonSchemaProperty createArraySchemaProperty(String fieldName, MongoPersistentProperty property, + boolean required) { + + ArrayJsonSchemaProperty schemaProperty = JsonSchemaProperty.array(fieldName); + + if (isSpecificType(property)) { + schemaProperty = potentiallyEnhanceArraySchemaProperty(property, schemaProperty); + } + + return createPotentiallyRequiredSchemaProperty(schemaProperty, required); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private ArrayJsonSchemaProperty potentiallyEnhanceArraySchemaProperty(MongoPersistentProperty property, + ArrayJsonSchemaProperty schemaProperty) { + + MongoPersistentEntity persistentEntity = mappingContext + .getPersistentEntity(property.getTypeInformation().getRequiredComponentType()); + + if (persistentEntity != null) { + + List nestedProperties = computePropertiesForEntity(Collections.emptyList(), persistentEntity); + + if (nestedProperties.isEmpty()) { + return schemaProperty; + } + + return schemaProperty + .items(JsonSchemaObject.object().properties(nestedProperties.toArray(new JsonSchemaProperty[0]))); + } + + if 
(ClassUtils.isAssignable(Enum.class, property.getActualType())) { + + List possibleValues = getPossibleEnumValues((Class) property.getActualType()); + + return schemaProperty + .items(createSchemaObject(computeTargetType(property.getActualType(), possibleValues), possibleValues)); + } + + return schemaProperty.items(JsonSchemaObject.of(property.getActualType())); + } + + private boolean isSpecificType(MongoPersistentProperty property) { + return !TypeInformation.OBJECT.equals(property.getTypeInformation().getActualType()); + } + + private JsonSchemaProperty applyEncryptionDataIfNecessary(MongoPersistentProperty property, + JsonSchemaProperty schemaProperty) { + + Encrypted encrypted = property.findAnnotation(Encrypted.class); + if (encrypted == null) { + return schemaProperty; + } + + EncryptedJsonSchemaProperty enc = new EncryptedJsonSchemaProperty(schemaProperty); + if (StringUtils.hasText(encrypted.algorithm())) { + enc = enc.algorithm(encrypted.algorithm()); + } + if (!ObjectUtils.isEmpty(encrypted.keyId())) { + enc = enc.keys(property.getEncryptionKeyIds()); + } + + Queryable queryable = property.findAnnotation(Queryable.class); + if (queryable == null || !StringUtils.hasText(queryable.queryType())) { + return enc; + } + + QueryCharacteristic characteristic = new QueryCharacteristic() { + + @Override + public String queryType() { + return queryable.queryType(); + } + + @Override + public Document toDocument() { + + Document options = QueryCharacteristic.super.toDocument(); + + if (queryable.contentionFactor() >= 0) { + options.put("contention", queryable.contentionFactor()); + } + + if (StringUtils.hasText(queryable.queryAttributes())) { + options.putAll(Document.parse(queryable.queryAttributes())); + } + + return options; + } + }; + return new QueryableJsonSchemaProperty(enc, QueryCharacteristics.of(characteristic)); + } + + private JsonSchemaProperty createObjectSchemaPropertyForEntity(List path, + MongoPersistentProperty property, boolean required) { + + 
ObjectJsonSchemaProperty target = JsonSchemaProperty.object(property.getName()); + List nestedProperties = computePropertiesForEntity(path, + mappingContext.getRequiredPersistentEntity(property)); + + return createPotentiallyRequiredSchemaProperty( + target.properties(nestedProperties.toArray(new JsonSchemaProperty[0])), required); + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + private JsonSchemaProperty createEnumSchemaProperty(String fieldName, Class targetType, boolean required) { + + List possibleValues = getPossibleEnumValues((Class) targetType); + + targetType = computeTargetType(targetType, possibleValues); + return createSchemaProperty(fieldName, targetType, required, possibleValues); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required) { + return createSchemaProperty(fieldName, type, required, Collections.emptyList()); + } + + JsonSchemaProperty createSchemaProperty(String fieldName, Object type, boolean required, + Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = createSchemaObject(type, possibleValues); + + return createPotentiallyRequiredSchemaProperty(JsonSchemaProperty.named(fieldName).with(schemaObject), required); + } + + private TypedJsonSchemaObject createSchemaObject(Object type, Collection possibleValues) { + + TypedJsonSchemaObject schemaObject = type instanceof Type typeObject ? JsonSchemaObject.of(typeObject) + : JsonSchemaObject.of(Class.class.cast(type)); + + if (!CollectionUtils.isEmpty(possibleValues)) { + schemaObject = schemaObject.possibleValues(possibleValues); + } + return schemaObject; + } + + private String computePropertyFieldName(@Nullable PersistentProperty property) { + + Assert.notNull(property, "Property must not be null"); + + return property instanceof MongoPersistentProperty mongoPersistentProperty ? 
mongoPersistentProperty.getFieldName() + : property.getName(); + } + + private boolean isRequiredProperty(PersistentProperty property) { + return property.getType().isPrimitive(); + } + + private Class computeTargetType(PersistentProperty property) { + + if (!(property instanceof MongoPersistentProperty mongoProperty)) { + return property.getType(); + } + + if (!property.getOwner().isIdProperty(property)) { + return mongoProperty.getFieldType(); + } + + if (mongoProperty.hasExplicitWriteTarget()) { + return mongoProperty.getRequiredAnnotation(Field.class).targetType().getJavaClass(); + } + + return mongoProperty.getFieldType() != mongoProperty.getActualType() ? Object.class : mongoProperty.getFieldType(); + } + + private static Class computeTargetType(Class fallback, List possibleValues) { + return possibleValues.isEmpty() ? fallback : possibleValues.iterator().next().getClass(); + } + + private > List getPossibleEnumValues(Class targetType) { + + EnumSet enumSet = EnumSet.allOf(targetType); + List possibleValues = new ArrayList<>(enumSet.size()); + + for (Object enumValue : enumSet) { + possibleValues.add(converter.convertToMongoType(enumValue)); + } + + return possibleValues; + } + + private static boolean isCollection(MongoPersistentProperty property) { + return property.isCollectionLike() && !property.getType().equals(byte[].class); + } + + static JsonSchemaProperty createPotentiallyRequiredSchemaProperty(JsonSchemaProperty property, boolean required) { + return required ? 
JsonSchemaProperty.required(property) : property; + } + + class PropertyContext implements JsonSchemaPropertyContext { + + private final String path; + private final MongoPersistentProperty property; + + public PropertyContext(String path, MongoPersistentProperty property) { + this.path = path; + this.property = property; + } + + @Override + public String getPath() { + return path; + } + + @Override + public MongoPersistentProperty getProperty() { + return property; + } + + @Override + @SuppressWarnings("unchecked") + public @Nullable MongoPersistentEntity resolveEntity(MongoPersistentProperty property) { + return (MongoPersistentEntity) mappingContext.getPersistentEntity(property); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java index a9d277261c..c827c5b8a9 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAction.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,7 @@ package org.springframework.data.mongodb.core; import org.bson.Document; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; import org.springframework.util.Assert; import com.mongodb.WriteConcern; @@ -57,8 +57,8 @@ public class MongoAction { public MongoAction(@Nullable WriteConcern defaultWriteConcern, MongoActionOperation mongoActionOperation, String collectionName, @Nullable Class entityType, @Nullable Document document, @Nullable Document query) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(mongoActionOperation, "MongoActionOperation must not be null"); this.defaultWriteConcern = defaultWriteConcern; this.mongoActionOperation = mongoActionOperation; @@ -72,28 +72,23 @@ public String getCollectionName() { return collectionName; } - @Nullable - public WriteConcern getDefaultWriteConcern() { + public @Nullable WriteConcern getDefaultWriteConcern() { return defaultWriteConcern; } - @Nullable - public Class getEntityType() { + public @Nullable Class getEntityType() { return entityType; } - @Nullable - public MongoActionOperation getMongoActionOperation() { + public @Nullable MongoActionOperation getMongoActionOperation() { return mongoActionOperation; } - @Nullable - public Document getQuery() { + public @Nullable Document getQuery() { return query; } - @Nullable - public Document getDocument() { + public @Nullable Document getDocument() { return document; } diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java index 9d722018af..509d10887b 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoActionOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,9 +21,10 @@ * * @author Mark Pollack * @author Oliver Gierke + * @author Christoph Strobl * @see MongoAction */ public enum MongoActionOperation { - REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK; + REMOVE, UPDATE, INSERT, INSERT_LIST, SAVE, BULK, REPLACE } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java index 6b697c01c7..5fcc6c9599 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdmin.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,7 @@ import org.springframework.jmx.export.annotation.ManagedResource; import org.springframework.util.Assert; -import com.mongodb.MongoClient; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoDatabase; /** @@ -30,37 +30,34 @@ * @author Thomas Darimont * @author Mark Paluch * @author Christoph Strobl + * @deprecated since 4.5 */ +@Deprecated(since = "4.5", forRemoval = true) @ManagedResource(description = "Mongo Admin Operations") public class MongoAdmin implements MongoAdminOperations { private final MongoClient mongoClient; - public MongoAdmin(MongoClient mongoClient) { + /** + * @param client the underlying {@link com.mongodb.client.MongoClient} used for data access. + * @since 2.2 + */ + public MongoAdmin(MongoClient client) { - Assert.notNull(mongoClient, "MongoClient must not be null!"); - this.mongoClient = mongoClient; + Assert.notNull(client, "Client must not be null"); + this.mongoClient = client; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#dropDatabase(java.lang.String) - */ @ManagedOperation public void dropDatabase(String databaseName) { getDB(databaseName).drop(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#createDatabase(java.lang.String) - */ @ManagedOperation public void createDatabase(String databaseName) { getDB(databaseName); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.core.MongoAdminOperations#getDatabaseStats(java.lang.String) - */ @ManagedOperation public String getDatabaseStats(String databaseName) { return getDB(databaseName).runCommand(new Document("dbStats", 1).append("scale", 1024)).toJson(); diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java index 70ef0f443f..ec03302f7e 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoAdminOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,6 +21,7 @@ * @author Mark Pollack * @author Oliver Gierke */ +@Deprecated(since = "4.5", forRemoval = true) public interface MongoAdminOperations { @ManagedOperation diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java index 078ab8cc67..9210dd85ec 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientFactoryBean.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,66 +16,70 @@ package org.springframework.data.mongodb.core; import java.net.UnknownHostException; -import java.util.ArrayList; -import java.util.Collections; +import java.util.Arrays; import java.util.List; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import org.bson.UuidRepresentation; +import org.jspecify.annotations.Nullable; import org.springframework.beans.factory.config.AbstractFactoryBean; import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.SpringDataMongoDB; import org.springframework.util.CollectionUtils; +import org.springframework.util.ObjectUtils; import org.springframework.util.StringUtils; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; +import com.mongodb.ConnectionString; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; import com.mongodb.MongoCredential; import com.mongodb.ServerAddress; +import com.mongodb.client.MongoClient; +import com.mongodb.client.MongoClients; +import com.mongodb.connection.ClusterSettings; +import com.mongodb.connection.ConnectionPoolSettings; +import com.mongodb.connection.ServerSettings; +import com.mongodb.connection.SocketSettings; +import com.mongodb.connection.SslSettings; +import com.mongodb.event.ClusterListener; /** * Convenient factory for configuring MongoDB. 
* * @author Christoph Strobl * @author Mark Paluch - * @since 1.7 */ public class MongoClientFactoryBean extends AbstractFactoryBean implements PersistenceExceptionTranslator { - private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - - private @Nullable MongoClientOptions mongoClientOptions; + private @Nullable MongoClientSettings mongoClientSettings; private @Nullable String host; private @Nullable Integer port; - private List replicaSetSeeds = Collections.emptyList(); - private List credentials = Collections.emptyList(); + private @Nullable List credential = null; + private @Nullable ConnectionString connectionString; + private @Nullable String replicaSet = null; - private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR; + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; /** - * Set the {@link MongoClientOptions} to be used when creating {@link MongoClient}. + * Set the {@link MongoClientSettings} to be used when creating {@link MongoClient}. * * @param mongoClientOptions */ - public void setMongoClientOptions(@Nullable MongoClientOptions mongoClientOptions) { - this.mongoClientOptions = mongoClientOptions; + public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientOptions) { + this.mongoClientSettings = mongoClientOptions; } /** * Set the list of credentials to be used when creating {@link MongoClient}. * - * @param credentials can be {@literal null}. + * @param credential can be {@literal null}. */ - public void setCredentials(@Nullable MongoCredential[] credentials) { - this.credentials = filterNonNullElementsAsList(credentials); - } - - /** - * Set the list of {@link ServerAddress} to build up a replica set for. - * - * @param replicaSetSeeds can be {@literal null}. 
- */ - public void setReplicaSetSeeds(@Nullable ServerAddress[] replicaSetSeeds) { - this.replicaSetSeeds = filterNonNullElementsAsList(replicaSetSeeds); + public void setCredential(MongoCredential @Nullable[] credential) { + this.credential = Arrays.asList(credential); } /** @@ -96,50 +100,39 @@ public void setPort(int port) { this.port = port; } + public void setConnectionString(@Nullable ConnectionString connectionString) { + this.connectionString = connectionString; + } + + public void setReplicaSet(@Nullable String replicaSet) { + this.replicaSet = replicaSet; + } + /** * Configures the {@link PersistenceExceptionTranslator} to use. * * @param exceptionTranslator */ public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) { - this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator; + this.exceptionTranslator = exceptionTranslator == null ? MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR + : exceptionTranslator; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return MongoClient.class; + @Override + public @Nullable DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return exceptionTranslator.translateExceptionIfPossible(ex); } - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ - @Nullable - public DataAccessException translateExceptionIfPossible(RuntimeException ex) { - return exceptionTranslator.translateExceptionIfPossible(ex); + @Override + public Class getObjectType() { + return MongoClient.class; } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ @Override protected MongoClient createInstance() throws Exception { - - if (mongoClientOptions == null) { - mongoClientOptions = 
MongoClientOptions.builder().build(); - } - - return createMongoClient(); + return createMongoClient(computeClientSetting()); } - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#destroyInstance(java.lang.Object) - */ @Override protected void destroyInstance(@Nullable MongoClient instance) throws Exception { @@ -148,43 +141,207 @@ protected void destroyInstance(@Nullable MongoClient instance) throws Exception } } - private MongoClient createMongoClient() throws UnknownHostException { + /** + * Create {@link MongoClientSettings} based on configuration and priority (lower is better). + *
    + *
  1. {@link MongoClientFactoryBean#mongoClientSettings}
  2. + *
  3. {@link MongoClientFactoryBean#connectionString}
  4. + *
  5. default {@link MongoClientSettings}
  6. + *
+ * + * @since 3.0 + */ + protected MongoClientSettings computeClientSetting() { - if (!CollectionUtils.isEmpty(replicaSetSeeds)) { - return new MongoClient(replicaSetSeeds, credentials, mongoClientOptions); + if (connectionString != null && (StringUtils.hasText(host) || port != null)) { + throw new IllegalStateException("ConnectionString and host/port configuration exclude one another"); } - return new MongoClient(createConfiguredOrDefaultServerAddress(), credentials, mongoClientOptions); - } + ConnectionString connectionString = this.connectionString != null ? this.connectionString + : new ConnectionString(String.format("mongodb://%s:%s", getOrDefault(host, ServerAddress.defaultHost()), + getOrDefault(port, "" + ServerAddress.defaultPort()))); + + Builder builder = MongoClientSettings.builder().applyConnectionString(connectionString); + builder.uuidRepresentation(UuidRepresentation.JAVA_LEGACY); + + if (mongoClientSettings != null) { + + MongoClientSettings defaultSettings = MongoClientSettings.builder().build(); + + SslSettings sslSettings = mongoClientSettings.getSslSettings(); + ClusterSettings clusterSettings = mongoClientSettings.getClusterSettings(); + ConnectionPoolSettings connectionPoolSettings = mongoClientSettings.getConnectionPoolSettings(); + SocketSettings socketSettings = mongoClientSettings.getSocketSettings(); + ServerSettings serverSettings = mongoClientSettings.getServerSettings(); + + builder = builder // + .applicationName(computeSettingsValue(defaultSettings.getApplicationName(), + mongoClientSettings.getApplicationName(), connectionString.getApplicationName())) // + .applyToSslSettings(settings -> { - private ServerAddress createConfiguredOrDefaultServerAddress() throws UnknownHostException { + applySettings(settings::enabled, computeSettingsValue(SslSettings::isEnabled, + defaultSettings.getSslSettings(), sslSettings, connectionString.getSslEnabled())); + applySettings(settings::invalidHostNameAllowed, 
(computeSettingsValue(SslSettings::isInvalidHostNameAllowed, + defaultSettings.getSslSettings(), sslSettings, connectionString.getSslInvalidHostnameAllowed()))); + settings.context(sslSettings.getContext()); + }).applyToClusterSettings(settings -> { - ServerAddress defaultAddress = new ServerAddress(); + applySettings(settings::hosts, + computeSettingsValue(ClusterSettings::getHosts, defaultSettings.getClusterSettings(), clusterSettings, + connectionString.getHosts().stream().map(ServerAddress::new).collect(Collectors.toList()))); - return new ServerAddress(StringUtils.hasText(host) ? host : defaultAddress.getHost(), - port != null ? port.intValue() : defaultAddress.getPort()); + applySettings(settings::requiredReplicaSetName, + computeSettingsValue(ClusterSettings::getRequiredReplicaSetName, defaultSettings.getClusterSettings(), + clusterSettings, connectionString.getRequiredReplicaSetName())); + + applySettings(settings::srvHost, computeSettingsValue(ClusterSettings::getSrvHost, + defaultSettings.getClusterSettings(), clusterSettings, null)); + + applySettings(settings::mode, computeSettingsValue(ClusterSettings::getMode, + defaultSettings.getClusterSettings(), clusterSettings, null)); + + applySettings(it -> settings.localThreshold(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ClusterSettings it) -> it.getLocalThreshold(TimeUnit.MILLISECONDS), + defaultSettings.getClusterSettings(), clusterSettings, connectionString.getLocalThreshold())); + + applySettings(settings::requiredClusterType, computeSettingsValue(ClusterSettings::getRequiredClusterType, + defaultSettings.getClusterSettings(), clusterSettings, null)); + applySettings(it -> settings.serverSelectionTimeout(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ClusterSettings it) -> it.getServerSelectionTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getClusterSettings(), clusterSettings, + connectionString.getServerSelectionTimeout())); + + 
applySettings(settings::serverSelector, computeSettingsValue(ClusterSettings::getServerSelector, + defaultSettings.getClusterSettings(), clusterSettings, null)); + List clusterListeners = computeSettingsValue(ClusterSettings::getClusterListeners, + defaultSettings.getClusterSettings(), clusterSettings, null); + if (clusterListeners != null) { + clusterListeners.forEach(settings::addClusterListener); + } + }) // + .applyToConnectionPoolSettings(settings -> { + + applySettings(it -> settings.maintenanceFrequency(it, TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaintenanceFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null)); + + applySettings(it -> settings.maxConnectionIdleTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionIdleTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxConnectionIdleTime())); + + applySettings(it -> settings.maxConnectionLifeTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxConnectionLifeTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxConnectionLifeTime())); + + applySettings(it -> settings.maxWaitTime(it.longValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ConnectionPoolSettings it) -> it.getMaxWaitTime(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, + connectionString.getMaxWaitTime())); + + applySettings(it -> settings.maintenanceInitialDelay(it, TimeUnit.MILLISECONDS), + computeSettingsValue( + (ConnectionPoolSettings it) -> it.getMaintenanceInitialDelay(TimeUnit.MILLISECONDS), + defaultSettings.getConnectionPoolSettings(), connectionPoolSettings, null)); + + applySettings(settings::minSize, + 
computeSettingsValue(ConnectionPoolSettings::getMinSize, defaultSettings.getConnectionPoolSettings(), + connectionPoolSettings, connectionString.getMinConnectionPoolSize())); + applySettings(settings::maxSize, + computeSettingsValue(ConnectionPoolSettings::getMaxSize, defaultSettings.getConnectionPoolSettings(), + connectionPoolSettings, connectionString.getMaxConnectionPoolSize())); + }) // + .applyToSocketSettings(settings -> { + + applySettings(it -> settings.connectTimeout(it, TimeUnit.MILLISECONDS), + computeSettingsValue((SocketSettings it) -> it.getConnectTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getSocketSettings(), socketSettings, connectionString.getConnectTimeout())); + + applySettings(it -> settings.readTimeout(it, TimeUnit.MILLISECONDS), + computeSettingsValue((SocketSettings it) -> it.getReadTimeout(TimeUnit.MILLISECONDS), + defaultSettings.getSocketSettings(), socketSettings, connectionString.getSocketTimeout())); + applySettings(settings::receiveBufferSize, computeSettingsValue(SocketSettings::getReceiveBufferSize, + defaultSettings.getSocketSettings(), socketSettings, null)); + applySettings(settings::sendBufferSize, computeSettingsValue(SocketSettings::getSendBufferSize, + defaultSettings.getSocketSettings(), socketSettings, null)); + }) // + .applyToServerSettings(settings -> { + + applySettings(it -> settings.minHeartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ServerSettings it) -> it.getMinHeartbeatFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getServerSettings(), serverSettings, null)); + + applySettings(it -> settings.heartbeatFrequency(it.intValue(), TimeUnit.MILLISECONDS), + computeSettingsValue((ServerSettings it) -> it.getHeartbeatFrequency(TimeUnit.MILLISECONDS), + defaultSettings.getServerSettings(), serverSettings, connectionString.getHeartbeatFrequency())); + settings.applySettings(serverSettings); + }) // + .autoEncryptionSettings(mongoClientSettings.getAutoEncryptionSettings()) // + 
.codecRegistry(mongoClientSettings.getCodecRegistry()); // + + applySettings(builder::readConcern, computeSettingsValue(defaultSettings.getReadConcern(), + mongoClientSettings.getReadConcern(), connectionString.getReadConcern())); + applySettings(builder::writeConcern, computeSettingsValue(defaultSettings.getWriteConcern(), + mongoClientSettings.getWriteConcern(), connectionString.getWriteConcern())); + applySettings(builder::readPreference, computeSettingsValue(defaultSettings.getReadPreference(), + mongoClientSettings.getReadPreference(), connectionString.getReadPreference())); + applySettings(builder::retryReads, computeSettingsValue(defaultSettings.getRetryReads(), + mongoClientSettings.getRetryReads(), connectionString.getRetryReads())); + applySettings(builder::retryWrites, computeSettingsValue(defaultSettings.getRetryWrites(), + mongoClientSettings.getRetryWrites(), connectionString.getRetryWritesValue())); + applySettings(builder::uuidRepresentation, + computeSettingsValue(null, mongoClientSettings.getUuidRepresentation(), UuidRepresentation.JAVA_LEGACY)); + } + + if (!CollectionUtils.isEmpty(credential)) { + builder = builder.credential(credential.iterator().next()); + } + + if (StringUtils.hasText(replicaSet)) { + builder.applyToClusterSettings((settings) -> { + settings.requiredReplicaSetName(replicaSet); + }); + } + + return builder.build(); } - /** - * Returns the given array as {@link List} with all {@literal null} elements removed. - * - * @param elements the elements to filter , can be {@literal null}. - * @return a new unmodifiable {@link List#} from the given elements without {@literal null}s. 
- */ - private static List filterNonNullElementsAsList(@Nullable T[] elements) { + private void applySettings(Consumer settingsBuilder, @Nullable T value) { - if (elements == null) { - return Collections.emptyList(); + if (ObjectUtils.isEmpty(value)) { + return; } + settingsBuilder.accept(value); + } - List candidateElements = new ArrayList(); + private @Nullable T computeSettingsValue(Function function, S defaultValueHolder, S settingsValueHolder, + @Nullable T connectionStringValue) { + return computeSettingsValue(function.apply(defaultValueHolder), function.apply(settingsValueHolder), + connectionStringValue); + } + + private @Nullable T computeSettingsValue(@Nullable T defaultValue, T fromSettings, @Nullable T fromConnectionString) { - for (T element : elements) { - if (element != null) { - candidateElements.add(element); - } + boolean fromSettingsIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromSettings); + boolean fromConnectionStringIsDefault = ObjectUtils.nullSafeEquals(defaultValue, fromConnectionString); + + if (!fromSettingsIsDefault) { + return fromSettings; } + return !fromConnectionStringIsDefault ? fromConnectionString : defaultValue; + } - return Collections.unmodifiableList(candidateElements); + private MongoClient createMongoClient(MongoClientSettings settings) throws UnknownHostException { + return MongoClients.create(settings, SpringDataMongoDB.driverInformation()); + } + + private String getOrDefault(@Nullable Object value, String defaultValue) { + + if(value == null) { + return defaultValue; + } + String sValue = value.toString(); + return StringUtils.hasText(sValue) ? 
sValue : defaultValue; } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java deleted file mode 100644 index 3481a9f80e..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientOptionsFactoryBean.java +++ /dev/null @@ -1,314 +0,0 @@ -/* - * Copyright 2015-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core; - -import javax.net.SocketFactory; -import javax.net.ssl.SSLSocketFactory; - -import org.springframework.beans.factory.config.AbstractFactoryBean; -import org.springframework.data.mongodb.MongoDbFactory; -import org.springframework.lang.Nullable; - -import com.mongodb.DBDecoderFactory; -import com.mongodb.DBEncoderFactory; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientOptions; -import com.mongodb.ReadPreference; -import com.mongodb.WriteConcern; - -/** - * A factory bean for construction of a {@link MongoClientOptions} instance. 
- * - * @author Christoph Strobl - * @author Oliver Gierke - * @author Mark Paluch - * @since 1.7 - */ -public class MongoClientOptionsFactoryBean extends AbstractFactoryBean { - - private static final MongoClientOptions DEFAULT_MONGO_OPTIONS = MongoClientOptions.builder().build(); - - private @Nullable String description = DEFAULT_MONGO_OPTIONS.getDescription(); - private int minConnectionsPerHost = DEFAULT_MONGO_OPTIONS.getMinConnectionsPerHost(); - private int connectionsPerHost = DEFAULT_MONGO_OPTIONS.getConnectionsPerHost(); - private int threadsAllowedToBlockForConnectionMultiplier = DEFAULT_MONGO_OPTIONS - .getThreadsAllowedToBlockForConnectionMultiplier(); - private int maxWaitTime = DEFAULT_MONGO_OPTIONS.getMaxWaitTime(); - private int maxConnectionIdleTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionIdleTime(); - private int maxConnectionLifeTime = DEFAULT_MONGO_OPTIONS.getMaxConnectionLifeTime(); - private int connectTimeout = DEFAULT_MONGO_OPTIONS.getConnectTimeout(); - private int socketTimeout = DEFAULT_MONGO_OPTIONS.getSocketTimeout(); - private boolean socketKeepAlive = DEFAULT_MONGO_OPTIONS.isSocketKeepAlive(); - private @Nullable ReadPreference readPreference = DEFAULT_MONGO_OPTIONS.getReadPreference(); - private DBDecoderFactory dbDecoderFactory = DEFAULT_MONGO_OPTIONS.getDbDecoderFactory(); - private DBEncoderFactory dbEncoderFactory = DEFAULT_MONGO_OPTIONS.getDbEncoderFactory(); - private @Nullable WriteConcern writeConcern = DEFAULT_MONGO_OPTIONS.getWriteConcern(); - private @Nullable SocketFactory socketFactory = DEFAULT_MONGO_OPTIONS.getSocketFactory(); - private boolean cursorFinalizerEnabled = DEFAULT_MONGO_OPTIONS.isCursorFinalizerEnabled(); - private boolean alwaysUseMBeans = DEFAULT_MONGO_OPTIONS.isAlwaysUseMBeans(); - private int heartbeatFrequency = DEFAULT_MONGO_OPTIONS.getHeartbeatFrequency(); - private int minHeartbeatFrequency = DEFAULT_MONGO_OPTIONS.getMinHeartbeatFrequency(); - private int heartbeatConnectTimeout = 
DEFAULT_MONGO_OPTIONS.getHeartbeatConnectTimeout(); - private int heartbeatSocketTimeout = DEFAULT_MONGO_OPTIONS.getHeartbeatSocketTimeout(); - private String requiredReplicaSetName = DEFAULT_MONGO_OPTIONS.getRequiredReplicaSetName(); - private int serverSelectionTimeout = DEFAULT_MONGO_OPTIONS.getServerSelectionTimeout(); - - private boolean ssl; - private @Nullable SSLSocketFactory sslSocketFactory; - - /** - * Set the {@link MongoClient} description. - * - * @param description - */ - public void setDescription(@Nullable String description) { - this.description = description; - } - - /** - * Set the minimum number of connections per host. - * - * @param minConnectionsPerHost - */ - public void setMinConnectionsPerHost(int minConnectionsPerHost) { - this.minConnectionsPerHost = minConnectionsPerHost; - } - - /** - * Set the number of connections allowed per host. Will block if run out. Default is 10. System property - * {@code MONGO.POOLSIZE} can override - * - * @param connectionsPerHost - */ - public void setConnectionsPerHost(int connectionsPerHost) { - this.connectionsPerHost = connectionsPerHost; - } - - /** - * Set the multiplier for connectionsPerHost for # of threads that can block. Default is 5. If connectionsPerHost is - * 10, and threadsAllowedToBlockForConnectionMultiplier is 5, then 50 threads can block more than that and an - * exception will be thrown. - * - * @param threadsAllowedToBlockForConnectionMultiplier - */ - public void setThreadsAllowedToBlockForConnectionMultiplier(int threadsAllowedToBlockForConnectionMultiplier) { - this.threadsAllowedToBlockForConnectionMultiplier = threadsAllowedToBlockForConnectionMultiplier; - } - - /** - * Set the max wait time of a blocking thread for a connection. Default is 12000 ms (2 minutes) - * - * @param maxWaitTime - */ - public void setMaxWaitTime(int maxWaitTime) { - this.maxWaitTime = maxWaitTime; - } - - /** - * The maximum idle time for a pooled connection. 
- * - * @param maxConnectionIdleTime - */ - public void setMaxConnectionIdleTime(int maxConnectionIdleTime) { - this.maxConnectionIdleTime = maxConnectionIdleTime; - } - - /** - * Set the maximum life time for a pooled connection. - * - * @param maxConnectionLifeTime - */ - public void setMaxConnectionLifeTime(int maxConnectionLifeTime) { - this.maxConnectionLifeTime = maxConnectionLifeTime; - } - - /** - * Set the connect timeout in milliseconds. 0 is default and infinite. - * - * @param connectTimeout - */ - public void setConnectTimeout(int connectTimeout) { - this.connectTimeout = connectTimeout; - } - - /** - * Set the socket timeout. 0 is default and infinite. - * - * @param socketTimeout - */ - public void setSocketTimeout(int socketTimeout) { - this.socketTimeout = socketTimeout; - } - - /** - * Set the keep alive flag, controls whether or not to have socket keep alive timeout. Defaults to false. - * - * @param socketKeepAlive - */ - public void setSocketKeepAlive(boolean socketKeepAlive) { - this.socketKeepAlive = socketKeepAlive; - } - - /** - * Set the {@link ReadPreference}. - * - * @param readPreference - */ - public void setReadPreference(@Nullable ReadPreference readPreference) { - this.readPreference = readPreference; - } - - /** - * Set the {@link WriteConcern} that will be the default value used when asking the {@link MongoDbFactory} for a DB - * object. - * - * @param writeConcern - */ - public void setWriteConcern(@Nullable WriteConcern writeConcern) { - this.writeConcern = writeConcern; - } - - /** - * @param socketFactory - */ - public void setSocketFactory(@Nullable SocketFactory socketFactory) { - this.socketFactory = socketFactory; - } - - /** - * Set the frequency that the driver will attempt to determine the current state of each server in the cluster. 
- * - * @param heartbeatFrequency - */ - public void setHeartbeatFrequency(int heartbeatFrequency) { - this.heartbeatFrequency = heartbeatFrequency; - } - - /** - * In the event that the driver has to frequently re-check a server's availability, it will wait at least this long - * since the previous check to avoid wasted effort. - * - * @param minHeartbeatFrequency - */ - public void setMinHeartbeatFrequency(int minHeartbeatFrequency) { - this.minHeartbeatFrequency = minHeartbeatFrequency; - } - - /** - * Set the connect timeout for connections used for the cluster heartbeat. - * - * @param heartbeatConnectTimeout - */ - public void setHeartbeatConnectTimeout(int heartbeatConnectTimeout) { - this.heartbeatConnectTimeout = heartbeatConnectTimeout; - } - - /** - * Set the socket timeout for connections used for the cluster heartbeat. - * - * @param heartbeatSocketTimeout - */ - public void setHeartbeatSocketTimeout(int heartbeatSocketTimeout) { - this.heartbeatSocketTimeout = heartbeatSocketTimeout; - } - - /** - * Configures the name of the replica set. - * - * @param requiredReplicaSetName - */ - public void setRequiredReplicaSetName(String requiredReplicaSetName) { - this.requiredReplicaSetName = requiredReplicaSetName; - } - - /** - * This controls if the driver should us an SSL connection. Defaults to |@literal false}. - * - * @param ssl - */ - public void setSsl(boolean ssl) { - this.ssl = ssl; - } - - /** - * Set the {@link SSLSocketFactory} to use for the {@literal SSL} connection. If none is configured here, - * {@link SSLSocketFactory#getDefault()} will be used. - * - * @param sslSocketFactory - */ - public void setSslSocketFactory(@Nullable SSLSocketFactory sslSocketFactory) { - - this.sslSocketFactory = sslSocketFactory; - this.ssl = sslSocketFactory != null; - } - - /** - * Set the {@literal server selection timeout} in msec for a 3.x MongoDB Java driver. If not set the default value of - * 30 sec will be used. 
A value of 0 means that it will timeout immediately if no server is available. A negative - * value means to wait indefinitely. - * - * @param serverSelectionTimeout in msec. - */ - public void setServerSelectionTimeout(int serverSelectionTimeout) { - this.serverSelectionTimeout = serverSelectionTimeout; - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.config.AbstractFactoryBean#createInstance() - */ - @SuppressWarnings("ConstantConditions") - @Override - protected MongoClientOptions createInstance() throws Exception { - - SocketFactory socketFactoryToUse = ssl - ? (sslSocketFactory != null ? sslSocketFactory : SSLSocketFactory.getDefault()) - : this.socketFactory; - - return MongoClientOptions.builder() // - .alwaysUseMBeans(this.alwaysUseMBeans) // - .connectionsPerHost(this.connectionsPerHost) // - .connectTimeout(connectTimeout) // - .cursorFinalizerEnabled(cursorFinalizerEnabled) // - .dbDecoderFactory(dbDecoderFactory) // - .dbEncoderFactory(dbEncoderFactory) // - .description(description) // - .heartbeatConnectTimeout(heartbeatConnectTimeout) // - .heartbeatFrequency(heartbeatFrequency) // - .heartbeatSocketTimeout(heartbeatSocketTimeout) // - .maxConnectionIdleTime(maxConnectionIdleTime) // - .maxConnectionLifeTime(maxConnectionLifeTime) // - .maxWaitTime(maxWaitTime) // - .minConnectionsPerHost(minConnectionsPerHost) // - .minHeartbeatFrequency(minHeartbeatFrequency) // - .readPreference(readPreference) // - .requiredReplicaSetName(requiredReplicaSetName) // - .serverSelectionTimeout(serverSelectionTimeout) // - .socketFactory(socketFactoryToUse) // - .socketKeepAlive(socketKeepAlive) // - .socketTimeout(socketTimeout) // - .threadsAllowedToBlockForConnectionMultiplier(threadsAllowedToBlockForConnectionMultiplier) // - .writeConcern(writeConcern).build(); - } - - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.FactoryBean#getObjectType() - */ - public Class getObjectType() { - return MongoClientOptions.class; - } -} 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java new file mode 100644 index 0000000000..813d3a4a04 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoClientSettingsFactoryBean.java @@ -0,0 +1,497 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.security.NoSuchAlgorithmException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import javax.net.ssl.SSLContext; + +import org.bson.UuidRepresentation; +import org.bson.codecs.configuration.CodecRegistry; +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.config.AbstractFactoryBean; +import org.springframework.util.CollectionUtils; +import org.springframework.util.StringUtils; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.MongoClientSettings; +import com.mongodb.MongoClientSettings.Builder; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; +import com.mongodb.ServerAddress; +import com.mongodb.ServerApi; +import com.mongodb.WriteConcern; +import com.mongodb.connection.ClusterConnectionMode; +import com.mongodb.connection.ClusterType; +import com.mongodb.connection.TransportSettings; + +/** + * A factory bean for construction of a {@link MongoClientSettings} instance to be used with a MongoDB driver. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0 + */ +public class MongoClientSettingsFactoryBean extends AbstractFactoryBean { + + private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build(); + + private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry(); + + @Nullable private TransportSettings transportSettings; + + private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference(); + private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern(); + private @Nullable Boolean retryReads = null; + + private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern(); + private @Nullable Boolean retryWrites = null; + + private @Nullable String applicationName = null; + + private @Nullable UuidRepresentation uUidRepresentation = null; + + // --> Socket Settings + + private int socketConnectTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings() + .getConnectTimeout(TimeUnit.MILLISECONDS); + private int socketReadTimeoutMS = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReadTimeout(TimeUnit.MILLISECONDS); + private int socketReceiveBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getReceiveBufferSize(); + private int socketSendBufferSize = DEFAULT_MONGO_SETTINGS.getSocketSettings().getSendBufferSize(); + + // --> Cluster Settings + + private @Nullable String clusterSrvHost = DEFAULT_MONGO_SETTINGS.getClusterSettings().getSrvHost(); + private List clusterHosts = Collections.emptyList(); + private @Nullable ClusterConnectionMode clusterConnectionMode = null; + private ClusterType custerRequiredClusterType = DEFAULT_MONGO_SETTINGS.getClusterSettings().getRequiredClusterType(); + private String clusterRequiredReplicaSetName = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getRequiredReplicaSetName(); + private long clusterLocalThresholdMS = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getLocalThreshold(TimeUnit.MILLISECONDS); + private long 
clusterServerSelectionTimeoutMS = DEFAULT_MONGO_SETTINGS.getClusterSettings() + .getServerSelectionTimeout(TimeUnit.MILLISECONDS); + + // --> ConnectionPoolSettings + + private int poolMaxSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMaxSize(); + private int poolMinSize = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings().getMinSize(); + private long poolMaxWaitTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxWaitTime(TimeUnit.MILLISECONDS); + private long poolMaxConnectionLifeTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxConnectionLifeTime(TimeUnit.MILLISECONDS); + private long poolMaxConnectionIdleTimeMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaxConnectionIdleTime(TimeUnit.MILLISECONDS); + private long poolMaintenanceInitialDelayMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaintenanceInitialDelay(TimeUnit.MILLISECONDS); + private long poolMaintenanceFrequencyMS = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings() + .getMaintenanceFrequency(TimeUnit.MILLISECONDS); + + // --> SSL Settings + + private boolean sslEnabled = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled(); + private boolean sslInvalidHostNameAllowed = DEFAULT_MONGO_SETTINGS.getSslSettings().isInvalidHostNameAllowed(); + private String sslProvider = DEFAULT_MONGO_SETTINGS.getSslSettings().isEnabled() + ? 
DEFAULT_MONGO_SETTINGS.getSslSettings().getContext().getProvider().getName() + : ""; + + // encryption and retry + + private @Nullable AutoEncryptionSettings autoEncryptionSettings; + private @Nullable ServerApi serverApi; + + /** + * @param socketConnectTimeoutMS in msec + * @see com.mongodb.connection.SocketSettings.Builder#connectTimeout(int, TimeUnit) + */ + public void setSocketConnectTimeoutMS(int socketConnectTimeoutMS) { + this.socketConnectTimeoutMS = socketConnectTimeoutMS; + } + + /** + * @param socketReadTimeoutMS in msec + * @see com.mongodb.connection.SocketSettings.Builder#readTimeout(int, TimeUnit) + */ + public void setSocketReadTimeoutMS(int socketReadTimeoutMS) { + this.socketReadTimeoutMS = socketReadTimeoutMS; + } + + /** + * @param socketReceiveBufferSize + * @see com.mongodb.connection.SocketSettings.Builder#receiveBufferSize(int) + */ + public void setSocketReceiveBufferSize(int socketReceiveBufferSize) { + this.socketReceiveBufferSize = socketReceiveBufferSize; + } + + /** + * @param socketSendBufferSize + * @see com.mongodb.connection.SocketSettings.Builder#sendBufferSize(int) + */ + public void setSocketSendBufferSize(int socketSendBufferSize) { + this.socketSendBufferSize = socketSendBufferSize; + } + + // --> Server Settings + + private long serverHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings() + .getHeartbeatFrequency(TimeUnit.MILLISECONDS); + private long serverMinHeartbeatFrequencyMS = DEFAULT_MONGO_SETTINGS.getServerSettings() + .getMinHeartbeatFrequency(TimeUnit.MILLISECONDS); + + /** + * @param serverHeartbeatFrequencyMS in msec + * @see com.mongodb.connection.ServerSettings.Builder#heartbeatFrequency(long, TimeUnit) + */ + public void setServerHeartbeatFrequencyMS(long serverHeartbeatFrequencyMS) { + this.serverHeartbeatFrequencyMS = serverHeartbeatFrequencyMS; + } + + /** + * @param serverMinHeartbeatFrequencyMS in msec + * @see com.mongodb.connection.ServerSettings.Builder#minHeartbeatFrequency(long, TimeUnit) 
+ */ + public void setServerMinHeartbeatFrequencyMS(long serverMinHeartbeatFrequencyMS) { + this.serverMinHeartbeatFrequencyMS = serverMinHeartbeatFrequencyMS; + } + + // --> Cluster Settings + + /** + * @param clusterSrvHost + * @see com.mongodb.connection.ClusterSettings.Builder#srvHost(String) + */ + public void setClusterSrvHost(String clusterSrvHost) { + this.clusterSrvHost = clusterSrvHost; + } + + /** + * @param clusterHosts + * @see com.mongodb.connection.ClusterSettings.Builder#hosts(List) + */ + public void setClusterHosts(ServerAddress[] clusterHosts) { + this.clusterHosts = Arrays.asList(clusterHosts); + } + + /** + * ???? + * + * @param clusterConnectionMode + * @see com.mongodb.connection.ClusterSettings.Builder#mode(ClusterConnectionMode) + */ + public void setClusterConnectionMode(ClusterConnectionMode clusterConnectionMode) { + this.clusterConnectionMode = clusterConnectionMode; + } + + /** + * @param custerRequiredClusterType + * @see com.mongodb.connection.ClusterSettings.Builder#requiredClusterType(ClusterType) + */ + public void setCusterRequiredClusterType(ClusterType custerRequiredClusterType) { + this.custerRequiredClusterType = custerRequiredClusterType; + } + + /** + * @param clusterRequiredReplicaSetName + * @see com.mongodb.connection.ClusterSettings.Builder#requiredReplicaSetName(String) + */ + public void setClusterRequiredReplicaSetName(String clusterRequiredReplicaSetName) { + this.clusterRequiredReplicaSetName = clusterRequiredReplicaSetName; + } + + /** + * @param clusterLocalThresholdMS in msec + * @see com.mongodb.connection.ClusterSettings.Builder#localThreshold(long, TimeUnit) + */ + public void setClusterLocalThresholdMS(long clusterLocalThresholdMS) { + this.clusterLocalThresholdMS = clusterLocalThresholdMS; + } + + /** + * @param clusterServerSelectionTimeoutMS in msec + * @see com.mongodb.connection.ClusterSettings.Builder#serverSelectionTimeout(long, TimeUnit) + */ + public void setClusterServerSelectionTimeoutMS(long 
clusterServerSelectionTimeoutMS) { + this.clusterServerSelectionTimeoutMS = clusterServerSelectionTimeoutMS; + } + + // --> ConnectionPoolSettings + + /** + * @param poolMaxSize + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxSize(int) + */ + public void setPoolMaxSize(int poolMaxSize) { + this.poolMaxSize = poolMaxSize; + } + + /** + * @param poolMinSize + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#minSize(int) + */ + public void setPoolMinSize(int poolMinSize) { + this.poolMinSize = poolMinSize; + } + + /** + * @param poolMaxWaitTimeMS in mesec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxWaitTime(long, TimeUnit) + */ + public void setPoolMaxWaitTimeMS(long poolMaxWaitTimeMS) { + this.poolMaxWaitTimeMS = poolMaxWaitTimeMS; + } + + /** + * @param poolMaxConnectionLifeTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionLifeTime(long, TimeUnit) + */ + public void setPoolMaxConnectionLifeTimeMS(long poolMaxConnectionLifeTimeMS) { + this.poolMaxConnectionLifeTimeMS = poolMaxConnectionLifeTimeMS; + } + + /** + * @param poolMaxConnectionIdleTimeMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maxConnectionIdleTime(long, TimeUnit) + */ + public void setPoolMaxConnectionIdleTimeMS(long poolMaxConnectionIdleTimeMS) { + this.poolMaxConnectionIdleTimeMS = poolMaxConnectionIdleTimeMS; + } + + /** + * @param poolMaintenanceInitialDelayMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceInitialDelay(long, TimeUnit) + */ + public void setPoolMaintenanceInitialDelayMS(long poolMaintenanceInitialDelayMS) { + this.poolMaintenanceInitialDelayMS = poolMaintenanceInitialDelayMS; + } + + /** + * @param poolMaintenanceFrequencyMS in msec + * @see com.mongodb.connection.ConnectionPoolSettings.Builder#maintenanceFrequency(long, TimeUnit) + */ + public void setPoolMaintenanceFrequencyMS(long poolMaintenanceFrequencyMS) { + 
this.poolMaintenanceFrequencyMS = poolMaintenanceFrequencyMS; + } + + // --> SSL Settings + + /** + * @param sslEnabled + * @see com.mongodb.connection.SslSettings.Builder#enabled(boolean) + */ + public void setSslEnabled(Boolean sslEnabled) { + this.sslEnabled = sslEnabled; + } + + /** + * @param sslInvalidHostNameAllowed + * @see com.mongodb.connection.SslSettings.Builder#invalidHostNameAllowed(boolean) + */ + public void setSslInvalidHostNameAllowed(Boolean sslInvalidHostNameAllowed) { + this.sslInvalidHostNameAllowed = sslInvalidHostNameAllowed; + } + + /** + * @param sslProvider + * @see com.mongodb.connection.SslSettings.Builder#context(SSLContext) + * @see SSLContext#getInstance(String) + */ + public void setSslProvider(String sslProvider) { + this.sslProvider = sslProvider; + } + + // encryption and retry + + /** + * @param applicationName + * @see MongoClientSettings.Builder#applicationName(String) + */ + public void setApplicationName(@Nullable String applicationName) { + this.applicationName = applicationName; + } + + /** + * @param retryReads + * @see MongoClientSettings.Builder#retryReads(boolean) + */ + public void setRetryReads(@Nullable Boolean retryReads) { + this.retryReads = retryReads; + } + + /** + * @param readConcern + * @see MongoClientSettings.Builder#readConcern(ReadConcern) + */ + public void setReadConcern(ReadConcern readConcern) { + this.readConcern = readConcern; + } + + /** + * @param writeConcern + * @see MongoClientSettings.Builder#writeConcern(WriteConcern) + */ + public void setWriteConcern(WriteConcern writeConcern) { + this.writeConcern = writeConcern; + } + + /** + * @param retryWrites + * @see MongoClientSettings.Builder#retryWrites(boolean) + */ + public void setRetryWrites(@Nullable Boolean retryWrites) { + this.retryWrites = retryWrites; + } + + /** + * @param readPreference + * @see MongoClientSettings.Builder#readPreference(ReadPreference) + */ + public void setReadPreference(ReadPreference readPreference) { + 
this.readPreference = readPreference; + } + + public void setTransportSettings(@Nullable TransportSettings transportSettings) { + this.transportSettings = transportSettings; + } + + /** + * @param codecRegistry + * @see MongoClientSettings.Builder#codecRegistry(CodecRegistry) + */ + public void setCodecRegistry(CodecRegistry codecRegistry) { + this.codecRegistry = codecRegistry; + } + + /** + * @param uUidRepresentation + */ + public void setuUidRepresentation(@Nullable UuidRepresentation uUidRepresentation) { + this.uUidRepresentation = uUidRepresentation; + } + + /** + * @param autoEncryptionSettings can be {@literal null}. + * @see MongoClientSettings.Builder#autoEncryptionSettings(AutoEncryptionSettings) + */ + public void setAutoEncryptionSettings(@Nullable AutoEncryptionSettings autoEncryptionSettings) { + this.autoEncryptionSettings = autoEncryptionSettings; + } + + /** + * @param serverApi can be {@literal null}. + * @see MongoClientSettings.Builder#serverApi(ServerApi) + * @since 3.3 + */ + public void setServerApi(@Nullable ServerApi serverApi) { + this.serverApi = serverApi; + } + + @Override + public Class getObjectType() { + return MongoClientSettings.class; + } + + @Override + protected MongoClientSettings createInstance() { + + Builder builder = MongoClientSettings.builder() // + .readPreference(readPreference) // + .writeConcern(writeConcern) // + .readConcern(readConcern) // + .codecRegistry(codecRegistry) // + .applicationName(applicationName) // + .autoEncryptionSettings(autoEncryptionSettings) // + .applyToClusterSettings((settings) -> { + + settings.serverSelectionTimeout(clusterServerSelectionTimeoutMS, TimeUnit.MILLISECONDS); + if (clusterConnectionMode != null) { + settings.mode(clusterConnectionMode); + } + settings.requiredReplicaSetName(clusterRequiredReplicaSetName); + + if (!CollectionUtils.isEmpty(clusterHosts)) { + settings.hosts(clusterHosts); + } + settings.localThreshold(clusterLocalThresholdMS, TimeUnit.MILLISECONDS); + 
settings.requiredClusterType(custerRequiredClusterType); + + if (StringUtils.hasText(clusterSrvHost)) { + settings.srvHost(clusterSrvHost); + } + }) // + .applyToConnectionPoolSettings((settings) -> { + + settings.minSize(poolMinSize); + settings.maxSize(poolMaxSize); + settings.maxConnectionIdleTime(poolMaxConnectionIdleTimeMS, TimeUnit.MILLISECONDS); + settings.maxWaitTime(poolMaxWaitTimeMS, TimeUnit.MILLISECONDS); + settings.maxConnectionLifeTime(poolMaxConnectionLifeTimeMS, TimeUnit.MILLISECONDS); + // settings.maxWaitQueueSize(poolMaxWaitQueueSize); + settings.maintenanceFrequency(poolMaintenanceFrequencyMS, TimeUnit.MILLISECONDS); + settings.maintenanceInitialDelay(poolMaintenanceInitialDelayMS, TimeUnit.MILLISECONDS); + }) // + .applyToServerSettings((settings) -> { + + settings.minHeartbeatFrequency(serverMinHeartbeatFrequencyMS, TimeUnit.MILLISECONDS); + settings.heartbeatFrequency(serverHeartbeatFrequencyMS, TimeUnit.MILLISECONDS); + }) // + .applyToSocketSettings((settings) -> { + + settings.connectTimeout(socketConnectTimeoutMS, TimeUnit.MILLISECONDS); + settings.readTimeout(socketReadTimeoutMS, TimeUnit.MILLISECONDS); + settings.receiveBufferSize(socketReceiveBufferSize); + settings.sendBufferSize(socketSendBufferSize); + }) // + .applyToSslSettings((settings) -> { + + settings.enabled(sslEnabled); + if (sslEnabled) { + + settings.invalidHostNameAllowed(sslInvalidHostNameAllowed); + try { + settings.context( + StringUtils.hasText(sslProvider) ? 
SSLContext.getInstance(sslProvider) : SSLContext.getDefault()); + } catch (NoSuchAlgorithmException e) { + throw new IllegalArgumentException(e.getMessage(), e); + } + } + }); + + if (transportSettings != null) { + builder.transportSettings(transportSettings); + } + + if (retryReads != null) { + builder = builder.retryReads(retryReads); + } + + if (retryWrites != null) { + builder = builder.retryWrites(retryWrites); + } + if (uUidRepresentation != null) { + builder = builder.uuidRepresentation(uUidRepresentation); + } + if (serverApi != null) { + builder = builder.serverApi(serverApi); + } + + return builder.build(); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java index dbebc02c31..df58a36770 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDataIntegrityViolationException.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,10 +16,9 @@ package org.springframework.data.mongodb.core; import org.springframework.dao.DataIntegrityViolationException; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; -import com.mongodb.WriteResult; +import com.mongodb.WriteConcernResult; /** * Mongo-specific {@link DataIntegrityViolationException}. @@ -30,39 +29,39 @@ public class MongoDataIntegrityViolationException extends DataIntegrityViolation private static final long serialVersionUID = -186980521176764046L; - private final WriteResult writeResult; + private final WriteConcernResult writeResult; private final MongoActionOperation actionOperation; /** - * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteResult}. + * Creates a new {@link MongoDataIntegrityViolationException} using the given message and {@link WriteConcernResult}. * * @param message the exception message - * @param writeResult the {@link WriteResult} that causes the exception, must not be {@literal null}. + * @param writeResult the {@link WriteConcernResult} that causes the exception, must not be {@literal null}. * @param actionOperation the {@link MongoActionOperation} that caused the exception, must not be {@literal null}. 
*/ - public MongoDataIntegrityViolationException(String message, WriteResult writeResult, - MongoActionOperation actionOperation) { + public MongoDataIntegrityViolationException(String message, WriteConcernResult writeResult, + MongoActionOperation actionOperation) { super(message); - Assert.notNull(writeResult, "WriteResult must not be null!"); - Assert.notNull(actionOperation, "MongoActionOperation must not be null!"); + Assert.notNull(writeResult, "WriteResult must not be null"); + Assert.notNull(actionOperation, "MongoActionOperation must not be null"); this.writeResult = writeResult; this.actionOperation = actionOperation; } /** - * Returns the {@link WriteResult} that caused the exception. + * Returns the {@link WriteConcernResult} that caused the exception. * * @return the writeResult */ - public WriteResult getWriteResult() { + public WriteConcernResult getWriteResult() { return writeResult; } /** - * Returns the {@link MongoActionOperation} in which the current exception occured. + * Returns the {@link MongoActionOperation} in which the current exception occurred. * * @return the actionOperation */ diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java similarity index 54% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java index 1aa8254f89..0a62b7aa49 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDbFactorySupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoDatabaseFactorySupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,62 +15,59 @@ */ package org.springframework.data.mongodb.core; -import lombok.Value; - +import org.jspecify.annotations.Nullable; import org.springframework.aop.framework.ProxyFactory; import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.SessionAwareMethodInterceptor; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import com.mongodb.ClientSessionOptions; -import com.mongodb.DB; import com.mongodb.WriteConcern; import com.mongodb.client.ClientSession; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; /** - * Common base class for usage with both {@link com.mongodb.client.MongoClients} and {@link com.mongodb.MongoClient} - * defining common properties such as database name and exception translator. - *

+ * Common base class for usage with {@link com.mongodb.client.MongoClients} defining common properties such as + * database name and exception translator.
* Not intended to be used directly. * * @author Christoph Strobl * @author Mark Paluch * @param Client type. - * @since 2.1 - * @see SimpleMongoDbFactory - * @see SimpleMongoClientDbFactory + * @since 3.0 + * @see SimpleMongoClientDatabaseFactory */ -public abstract class MongoDbFactorySupport implements MongoDbFactory { +public abstract class MongoDatabaseFactorySupport implements MongoDatabaseFactory { private final C mongoClient; private final String databaseName; private final boolean mongoInstanceCreated; - private final PersistenceExceptionTranslator exceptionTranslator; + private PersistenceExceptionTranslator exceptionTranslator; private @Nullable WriteConcern writeConcern; /** - * Create a new {@link MongoDbFactorySupport} object given {@code mongoClient}, {@code databaseName}, + * Create a new {@link MongoDatabaseFactorySupport} object given {@code mongoClient}, {@code databaseName}, * {@code mongoInstanceCreated} and {@link PersistenceExceptionTranslator}. - * + * * @param mongoClient must not be {@literal null}. * @param databaseName must not be {@literal null} or empty. * @param mongoInstanceCreated {@literal true} if the client instance was created by a subclass of - * {@link MongoDbFactorySupport} to close the client on {@link #destroy()}. + * {@link MongoDatabaseFactorySupport} to close the client on {@link #destroy()}. * @param exceptionTranslator must not be {@literal null}. 
*/ - protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated, + protected MongoDatabaseFactorySupport(C mongoClient, String databaseName, boolean mongoInstanceCreated, PersistenceExceptionTranslator exceptionTranslator) { - Assert.notNull(mongoClient, "MongoClient must not be null!"); - Assert.hasText(databaseName, "Database name must not be empty!"); + Assert.notNull(mongoClient, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), - "Database name must not contain slashes, dots, spaces, quotes, or dollar signs!"); + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); this.mongoClient = mongoClient; this.databaseName = databaseName; @@ -78,31 +75,39 @@ protected MongoDbFactorySupport(C mongoClient, String databaseName, boolean mong this.exceptionTranslator = exceptionTranslator; } + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. + * + * @param exceptionTranslator the exception translator to set. + * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; + } + /** * Configures the {@link WriteConcern} to be used on the {@link MongoDatabase} instance being created. * - * @param writeConcern the writeConcern to set + * @param writeConcern the writeConcern to set. 
*/ public void setWriteConcern(WriteConcern writeConcern) { this.writeConcern = writeConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb() - */ - public MongoDatabase getDb() throws DataAccessException { - return getDb(databaseName); + @Override + public MongoDatabase getMongoDatabase() throws DataAccessException { + return getMongoDatabase(getDefaultDatabaseName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String) - */ @Override - public MongoDatabase getDb(String dbName) throws DataAccessException { + public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { - Assert.hasText(dbName, "Database name must not be empty!"); + Assert.hasText(dbName, "Database name must not be empty"); MongoDatabase db = doGetMongoDatabase(dbName); @@ -115,36 +120,22 @@ public MongoDatabase getDb(String dbName) throws DataAccessException { /** * Get the actual {@link MongoDatabase} from the client. - * + * * @param dbName must not be {@literal null} or empty. 
* @return */ protected abstract MongoDatabase doGetMongoDatabase(String dbName); - /* - * (non-Javadoc) - * @see org.springframework.beans.factory.DisposableBean#destroy() - */ public void destroy() throws Exception { if (mongoInstanceCreated) { closeClient(); } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator() - */ - public PersistenceExceptionTranslator getExceptionTranslator() { - return this.exceptionTranslator; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.Session) - */ - public MongoDbFactory withSession(ClientSession session) { - return new MongoDbFactorySupport.ClientSessionBoundMongoDbFactory(session, this); + @Override + @Contract("_ -> new") + public MongoDatabaseFactory withSession(ClientSession session) { + return new MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session, this); } /** @@ -167,72 +158,52 @@ protected String getDefaultDatabaseName() { } /** - * {@link ClientSession} bound {@link MongoDbFactory} decorating the database with a + * {@link ClientSession} bound {@link MongoDatabaseFactory} decorating the database with a * {@link SessionAwareMethodInterceptor}. 
* * @author Christoph Strobl * @since 2.1 */ - @Value - static class ClientSessionBoundMongoDbFactory implements MongoDbFactory { + static final class ClientSessionBoundMongoDbFactory implements MongoDatabaseFactory { - ClientSession session; - MongoDbFactory delegate; + private final ClientSession session; + private final MongoDatabaseFactory delegate; - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb() - */ - @Override - public MongoDatabase getDb() throws DataAccessException { - return proxyMongoDatabase(delegate.getDb()); + public ClientSessionBoundMongoDbFactory(ClientSession session, MongoDatabaseFactory delegate) { + this.session = session; + this.delegate = delegate; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getDb(java.lang.String) - */ @Override - public MongoDatabase getDb(String dbName) throws DataAccessException { - return proxyMongoDatabase(delegate.getDb(dbName)); + public MongoDatabase getMongoDatabase() throws DataAccessException { + return proxyMongoDatabase(delegate.getMongoDatabase()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getExceptionTranslator() - */ @Override - public PersistenceExceptionTranslator getExceptionTranslator() { - return delegate.getExceptionTranslator(); + public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + return proxyMongoDatabase(delegate.getMongoDatabase(dbName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb() - */ @Override - public DB getLegacyDb() { - return delegate.getLegacyDb(); + public PersistenceExceptionTranslator getExceptionTranslator() { + return delegate.getExceptionTranslator(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions) - */ @Override public ClientSession getSession(ClientSessionOptions options) { return 
delegate.getSession(options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#withSession(com.mongodb.session.ClientSession) - */ @Override - public MongoDbFactory withSession(ClientSession session) { + public MongoDatabaseFactory withSession(ClientSession session) { return delegate.withSession(session); } + @Override + public boolean isTransactionActive() { + return session != null && session.hasActiveTransaction(); + } + private MongoDatabase proxyMongoDatabase(MongoDatabase database) { return createProxyInstance(session, database, MongoDatabase.class); } @@ -241,7 +212,8 @@ private MongoDatabase proxyDatabase(com.mongodb.session.ClientSession session, M return createProxyInstance(session, database, MongoDatabase.class); } - private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, MongoCollection collection) { + private MongoCollection proxyCollection(com.mongodb.session.ClientSession session, + MongoCollection collection) { return createProxyInstance(session, collection, MongoCollection.class); } @@ -255,8 +227,42 @@ private T createProxyInstance(com.mongodb.session.ClientSession session, T t factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, this::proxyDatabase, MongoCollection.class, this::proxyCollection)); - return targetType.cast(factory.getProxy()); + return targetType.cast(factory.getProxy(target.getClass().getClassLoader())); } - } + public ClientSession getSession() { + return this.session; + } + + public MongoDatabaseFactory getDelegate() { + return this.delegate; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o; + + if (!ObjectUtils.nullSafeEquals(this.session, that.session)) { + return false; + } + return 
ObjectUtils.nullSafeEquals(this.delegate, that.delegate); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(this.session); + result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate); + return result; + } + + public String toString() { + return "MongoDatabaseFactorySupport.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + ", delegate=" + + this.getDelegate() + ")"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java new file mode 100644 index 0000000000..f361b19bba --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoEncryptionSettingsFactoryBean.java @@ -0,0 +1,112 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Collections; +import java.util.Map; + +import org.bson.BsonDocument; +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.FactoryBean; + +import com.mongodb.AutoEncryptionSettings; +import com.mongodb.MongoClientSettings; + +/** + * {@link FactoryBean} for creating {@link AutoEncryptionSettings} using the {@link AutoEncryptionSettings.Builder}. 
+ * + * @author Christoph Strobl + * @since 2.2 + */ +public class MongoEncryptionSettingsFactoryBean implements FactoryBean { + + private boolean bypassAutoEncryption; + private @Nullable String keyVaultNamespace; + private @Nullable Map extraOptions; + private @Nullable MongoClientSettings keyVaultClientSettings; + private @Nullable Map> kmsProviders; + private @Nullable Map schemaMap; + + /** + * @param bypassAutoEncryption + * @see AutoEncryptionSettings.Builder#bypassAutoEncryption(boolean) + */ + public void setBypassAutoEncryption(boolean bypassAutoEncryption) { + this.bypassAutoEncryption = bypassAutoEncryption; + } + + /** + * @param extraOptions + * @see AutoEncryptionSettings.Builder#extraOptions(Map) + */ + public void setExtraOptions(Map extraOptions) { + this.extraOptions = extraOptions; + } + + /** + * @param keyVaultNamespace + * @see AutoEncryptionSettings.Builder#keyVaultNamespace(String) + */ + public void setKeyVaultNamespace(String keyVaultNamespace) { + this.keyVaultNamespace = keyVaultNamespace; + } + + /** + * @param keyVaultClientSettings + * @see AutoEncryptionSettings.Builder#keyVaultMongoClientSettings(MongoClientSettings) + */ + public void setKeyVaultClientSettings(MongoClientSettings keyVaultClientSettings) { + this.keyVaultClientSettings = keyVaultClientSettings; + } + + /** + * @param kmsProviders + * @see AutoEncryptionSettings.Builder#kmsProviders(Map) + */ + public void setKmsProviders(Map> kmsProviders) { + this.kmsProviders = kmsProviders; + } + + /** + * @param schemaMap + * @see AutoEncryptionSettings.Builder#schemaMap(Map) + */ + public void setSchemaMap(Map schemaMap) { + this.schemaMap = schemaMap; + } + + @Override + public AutoEncryptionSettings getObject() { + + return AutoEncryptionSettings.builder() // + .bypassAutoEncryption(bypassAutoEncryption) // + .keyVaultNamespace(keyVaultNamespace) // + .keyVaultMongoClientSettings(keyVaultClientSettings) // + .kmsProviders(orEmpty(kmsProviders)) // + 
.extraOptions(orEmpty(extraOptions)) // + .schemaMap(orEmpty(schemaMap)) // + .build(); + } + + private Map orEmpty(@Nullable Map source) { + return source != null ? source : Collections.emptyMap(); + } + + @Override + public Class getObjectType() { + return AutoEncryptionSettings.class; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java index 4e68d52a2e..2bde873c2f 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoExceptionTranslator.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,11 +15,10 @@ */ package org.springframework.data.mongodb.core; -import java.util.Arrays; -import java.util.HashSet; import java.util.Set; import org.bson.BsonInvalidOperationException; +import org.jspecify.annotations.Nullable; import org.springframework.dao.DataAccessException; import org.springframework.dao.DataAccessResourceFailureException; import org.springframework.dao.DataIntegrityViolationException; @@ -28,17 +27,16 @@ import org.springframework.dao.InvalidDataAccessResourceUsageException; import org.springframework.dao.PermissionDeniedDataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; -import org.springframework.data.mongodb.BulkOperationException; import org.springframework.data.mongodb.ClientSessionException; +import org.springframework.data.mongodb.TransientClientSessionException; import org.springframework.data.mongodb.UncategorizedMongoDbException; import org.springframework.data.mongodb.util.MongoDbErrorCodes; -import org.springframework.lang.Nullable; import org.springframework.util.ClassUtils; -import com.mongodb.BulkWriteException; import com.mongodb.MongoBulkWriteException; import com.mongodb.MongoException; import com.mongodb.MongoServerException; +import com.mongodb.MongoSocketException; import com.mongodb.bulk.BulkWriteError; /** @@ -49,28 +47,34 @@ * @author Oliver Gierke * @author Michal Vich * @author Christoph Strobl + * @author Brice Vandeputte */ public class MongoExceptionTranslator implements PersistenceExceptionTranslator { - private static final Set DULICATE_KEY_EXCEPTIONS = new HashSet( - Arrays.asList("MongoException.DuplicateKey", "DuplicateKeyException")); + public static final MongoExceptionTranslator 
DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator(); - private static final Set RESOURCE_FAILURE_EXCEPTIONS = new HashSet( - Arrays.asList("MongoException.Network", "MongoSocketException", "MongoException.CursorNotFound", - "MongoCursorNotFoundException", "MongoServerSelectionException", "MongoTimeoutException")); + private static final Set DUPLICATE_KEY_EXCEPTIONS = Set.of("MongoException.DuplicateKey", + "DuplicateKeyException"); - private static final Set RESOURCE_USAGE_EXCEPTIONS = new HashSet( - Arrays.asList("MongoInternalException")); + private static final Set RESOURCE_FAILURE_EXCEPTIONS = Set.of("MongoException.Network", + "MongoSocketException", "MongoException.CursorNotFound", "MongoCursorNotFoundException", + "MongoServerSelectionException", "MongoTimeoutException"); - private static final Set DATA_INTEGRETY_EXCEPTIONS = new HashSet( - Arrays.asList("WriteConcernException", "MongoWriteException", "MongoBulkWriteException")); + private static final Set RESOURCE_USAGE_EXCEPTIONS = Set.of("MongoInternalException"); + + private static final Set DATA_INTEGRITY_EXCEPTIONS = Set.of("WriteConcernException", "MongoWriteException", + "MongoBulkWriteException"); + + private static final Set SECURITY_EXCEPTIONS = Set.of("MongoCryptException"); + + @Override + public @Nullable DataAccessException translateExceptionIfPossible(RuntimeException ex) { + return doTranslateException(ex); + } - /* - * (non-Javadoc) - * @see org.springframework.dao.support.PersistenceExceptionTranslator#translateExceptionIfPossible(java.lang.RuntimeException) - */ @Nullable - public DataAccessException translateExceptionIfPossible(RuntimeException ex) { + @SuppressWarnings("NullAway") + DataAccessException doTranslateException(RuntimeException ex) { // Check for well-known MongoException subclasses. 
@@ -78,9 +82,13 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { throw new InvalidDataAccessApiUsageException(ex.getMessage(), ex); } + if (ex instanceof MongoSocketException) { + return new DataAccessResourceFailureException(ex.getMessage(), ex); + } + String exception = ClassUtils.getShortName(ClassUtils.getUserClass(ex.getClass())); - if (DULICATE_KEY_EXCEPTIONS.contains(exception)) { + if (DUPLICATE_KEY_EXCEPTIONS.contains(exception)) { return new DuplicateKeyException(ex.getMessage(), ex); } @@ -92,15 +100,15 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return new InvalidDataAccessResourceUsageException(ex.getMessage(), ex); } - if (DATA_INTEGRETY_EXCEPTIONS.contains(exception)) { + if (DATA_INTEGRITY_EXCEPTIONS.contains(exception)) { if (ex instanceof MongoServerException) { - if (((MongoServerException) ex).getCode() == 11000) { + if (MongoDbErrorCodes.isDataDuplicateKeyError(ex)) { return new DuplicateKeyException(ex.getMessage(), ex); } - if (ex instanceof MongoBulkWriteException) { - for (BulkWriteError x : ((MongoBulkWriteException) ex).getWriteErrors()) { - if (x.getCode() == 11000) { + if (ex instanceof MongoBulkWriteException bulkException) { + for (BulkWriteError writeError : bulkException.getWriteErrors()) { + if (MongoDbErrorCodes.isDuplicateKeyCode(writeError.getCode())) { return new DuplicateKeyException(ex.getMessage(), ex); } } @@ -110,25 +118,35 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { return new DataIntegrityViolationException(ex.getMessage(), ex); } - if (ex instanceof BulkWriteException) { - return new BulkOperationException(ex.getMessage(), (BulkWriteException) ex); - } - // All other MongoExceptions - if (ex instanceof MongoException) { + if (ex instanceof MongoException mongoException) { - int code = ((MongoException) ex).getCode(); + int code = mongoException.getCode(); - if (MongoDbErrorCodes.isDuplicateKeyCode(code)) { + if 
(MongoDbErrorCodes.isDuplicateKeyError(mongoException)) { return new DuplicateKeyException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isDataAccessResourceFailureCode(code)) { + } + if (MongoDbErrorCodes.isDataAccessResourceError(mongoException)) { return new DataAccessResourceFailureException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isInvalidDataAccessApiUsageCode(code) || code == 10003 || code == 12001 - || code == 12010 || code == 12011 || code == 12012) { + } + if (MongoDbErrorCodes.isInvalidDataAccessApiUsageError(mongoException) || code == 12001 || code == 12010 + || code == 12011 || code == 12012) { return new InvalidDataAccessApiUsageException(ex.getMessage(), ex); - } else if (MongoDbErrorCodes.isPermissionDeniedCode(code)) { + } + if (MongoDbErrorCodes.isPermissionDeniedError(mongoException)) { return new PermissionDeniedDataAccessException(ex.getMessage(), ex); } + if (MongoDbErrorCodes.isDataIntegrityViolationError(mongoException)) { + return new DataIntegrityViolationException(mongoException.getMessage(), mongoException); + } + if (MongoDbErrorCodes.isClientSessionFailure(mongoException)) { + return isTransientFailure(mongoException) ? new TransientClientSessionException(ex.getMessage(), ex) + : new ClientSessionException(ex.getMessage(), ex); + } + if (ex.getCause() != null && SECURITY_EXCEPTIONS.contains(ClassUtils.getShortName(ex.getCause().getClass()))) { + return new PermissionDeniedDataAccessException(ex.getMessage(), ex); + } + return new UncategorizedMongoDbException(ex.getMessage(), ex); } @@ -147,4 +165,27 @@ public DataAccessException translateExceptionIfPossible(RuntimeException ex) { // that translation should not occur. return null; } + + /** + * Check if a given exception holds an error label indicating a transient failure. + * + * @param e the exception to inspect. + * @return {@literal true} if the given {@link Exception} is a {@link MongoException} holding one of the transient + * exception error labels. 
+ * @see MongoException#hasErrorLabel(String) + * @since 4.4 + */ + public boolean isTransientFailure(Exception e) { + + if (e instanceof MongoException mongoException) { + return mongoException.hasErrorLabel(MongoException.TRANSIENT_TRANSACTION_ERROR_LABEL) + || mongoException.hasErrorLabel(MongoException.UNKNOWN_TRANSACTION_COMMIT_RESULT_LABEL); + } + + if (e.getCause() != e && e.getCause() instanceof Exception ex) { + return isTransientFailure(ex); + } + + return false; + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java new file mode 100644 index 0000000000..84c395bf2f --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoJsonSchemaCreator.java @@ -0,0 +1,250 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Predicate; + +import org.springframework.data.mapping.PersistentProperty; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; +import org.springframework.data.mongodb.core.mapping.Encrypted; +import org.springframework.data.mongodb.core.mapping.MongoMappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; +import org.springframework.data.mongodb.core.mapping.Unwrapped.Nullable; +import org.springframework.data.mongodb.core.schema.JsonSchemaProperty; +import org.springframework.data.mongodb.core.schema.MongoJsonSchema; +import org.springframework.util.Assert; + +/** + * {@link MongoJsonSchemaCreator} extracts the {@link MongoJsonSchema} for a given {@link Class} by applying the + * following mapping rules. + *

+ * <strong>Required Properties</strong>
+ * <ul>
+ * <li>Properties of primitive type</li>
+ * </ul>
+ * <strong>Ignored Properties</strong>
+ * <ul>
+ * <li>All properties annotated with {@link org.springframework.data.annotation.Transient}</li>
+ * </ul>
+ * <strong>Property Type Mapping</strong>
+ * <ul>
+ * <li>{@link java.lang.Object} -> {@code type : 'object'}</li>
+ * <li>{@link java.util.Arrays} -> {@code type : 'array'}</li>
+ * <li>{@link java.util.Collection} -> {@code type : 'array'}</li>
+ * <li>{@link java.util.Map} -> {@code type : 'object'}</li>
+ * <li>{@link java.lang.Enum} -> {@code type : 'string', enum : [the enum values]}</li>
+ * <li>Simple Types -> {@code type : 'the corresponding bson type' }</li>
+ * <li>Domain Types -> {@code type : 'object', properties : {the types properties} }</li>
+ * </ul>
+ * <p>
+ * {@link org.springframework.data.annotation.Id _id} properties using types that can be converted into + * {@link org.bson.types.ObjectId} like {@link String} will be mapped to {@code type : 'object'} unless there is more + * specific information available via the {@link org.springframework.data.mongodb.core.mapping.MongoId} annotation. + * {@link Encrypted} properties will contain {@literal encrypt} information. + * + * @author Christoph Strobl + * @since 2.2 + */ +public interface MongoJsonSchemaCreator { + + /** + * Create the {@link MongoJsonSchema} for the given {@link Class type}. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + */ + MongoJsonSchema createSchemaFor(Class type); + + /** + * Create a merged {@link MongoJsonSchema} out of the individual schemas of the given types by merging their + * properties into one large {@link MongoJsonSchema schema}. + * + * @param types must not be {@literal null} nor contain {@literal null}. + * @return new instance of {@link MongoJsonSchema}. + * @since 3.4 + */ + default MongoJsonSchema mergedSchemaFor(Class... types) { + + MongoJsonSchema[] schemas = Arrays.stream(types).map(this::createSchemaFor).toArray(MongoJsonSchema[]::new); + return MongoJsonSchema.merge(schemas); + } + + /** + * Filter matching {@link JsonSchemaProperty properties}. + * + * @param filter the {@link Predicate} to evaluate for inclusion. Must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + MongoJsonSchemaCreator filter(Predicate filter); + + /** + * Entry point to specify additional behavior for a given path. + * + * @param path the path using {@literal dot '.'} notation. + * @return new instance of {@link PropertySpecifier}. + * @since 3.4 + */ + PropertySpecifier property(String path); + + /** + * The context in which a specific {@link #getProperty()} is encountered during schema creation. 
+ * + * @since 3.3 + */ + interface JsonSchemaPropertyContext { + + /** + * The path to a given field/property in dot notation. + * + * @return never {@literal null}. + */ + String getPath(); + + /** + * The current property. + * + * @return never {@literal null}. + */ + MongoPersistentProperty getProperty(); + + /** + * Obtain the {@link MongoPersistentEntity} for a given property. + * + * @param property must not be {@literal null}. + * @param + * @return {@literal null} if the property is not an entity. It is nevertheless recommend to check + * {@link PersistentProperty#isEntity()} first. + */ + @Nullable MongoPersistentEntity resolveEntity(MongoPersistentProperty property); + + } + + /** + * A filter {@link Predicate} that matches {@link Encrypted encrypted properties} and those having nested ones. + * + * @return new instance of {@link Predicate}. + * @since 3.3 + */ + static Predicate encryptedOnly() { + + return new Predicate() { + + // cycle guard + private final Set seen = new HashSet<>(); + + @Override + public boolean test(JsonSchemaPropertyContext context) { + return extracted(context.getProperty(), context); + } + + @SuppressWarnings("NullAway") + private boolean extracted(MongoPersistentProperty property, JsonSchemaPropertyContext context) { + if (property.isAnnotationPresent(Encrypted.class)) { + return true; + } + + if (!property.isEntity() || seen.contains(property)) { + return false; + } + + seen.add(property); + + for (MongoPersistentProperty nested : context.resolveEntity(property)) { + if (extracted(nested, context)) { + return true; + } + } + return false; + } + }; + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of conversions applied by the given + * {@link MongoConverter}. + * + * @param mongoConverter must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. 
+ */ + static MongoJsonSchemaCreator create(MongoConverter mongoConverter) { + + Assert.notNull(mongoConverter, "MongoConverter must not be null"); + return new MappingMongoJsonSchemaCreator(mongoConverter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that is aware of type mappings and potential + * {@link org.springframework.data.spel.spi.EvaluationContextExtension extensions}. + * + * @param mappingContext must not be {@literal null}. + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create(MappingContext mappingContext) { + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * Creates a new {@link MongoJsonSchemaCreator} that does not consider potential extensions - suitable for testing. We + * recommend to use {@link #create(MappingContext)}. + * + * @return new instance of {@link MongoJsonSchemaCreator}. + * @since 3.3 + */ + static MongoJsonSchemaCreator create() { + + MongoMappingContext mappingContext = new MongoMappingContext(); + mappingContext.setSimpleTypeHolder(MongoSimpleTypes.HOLDER); + mappingContext.afterPropertiesSet(); + + MappingMongoConverter converter = new MappingMongoConverter(NoOpDbRefResolver.INSTANCE, mappingContext); + converter.setCustomConversions(MongoCustomConversions.create(config -> {})); + converter.afterPropertiesSet(); + + return create(converter); + } + + /** + * @author Christoph Strobl + * @since 3.4 + */ + interface PropertySpecifier { + + /** + * Set additional type parameters for polymorphic ones. + * + * @param types must not be {@literal null}. + * @return the source + */ + MongoJsonSchemaCreator withTypes(Class... 
types); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java index 85dbaf3479..6753f31c1a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,35 +18,42 @@ import java.util.Collection; import java.util.List; import java.util.Set; +import java.util.concurrent.locks.ReentrantLock; import java.util.function.Consumer; import java.util.function.Supplier; +import java.util.stream.Stream; import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResults; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationOperation; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; +import 
org.springframework.data.mongodb.core.aggregation.AggregationUpdate; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; import org.springframework.data.mongodb.core.convert.MappingMongoConverter; import org.springframework.data.mongodb.core.convert.MongoConverter; import org.springframework.data.mongodb.core.index.IndexOperations; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import org.springframework.data.mongodb.core.mapreduce.GroupByResults; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; -import org.springframework.data.util.CloseableIterator; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.util.Lock; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import com.mongodb.ClientSessionOptions; -import com.mongodb.Cursor; import com.mongodb.ReadPreference; import com.mongodb.client.ClientSession; import com.mongodb.client.MongoCollection; @@ -56,7 +63,10 @@ /** * Interface that specifies a basic set of MongoDB operations. Implemented by {@link MongoTemplate}. Not often used but * a useful option for extensibility and testability (as it can be easily mocked, stubbed, or be the target of a JDK - * proxy). + * proxy).
+ * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB + * specific documentation to learn more about Multi + * Document Transactions. * * @author Thomas Risberg * @author Mark Pollack @@ -67,6 +77,7 @@ * @author Thomas Darimont * @author Maninder Singh * @author Mark Paluch + * @author Woojin Shin */ public interface MongoOperations extends FluentMongoOperations { @@ -74,12 +85,13 @@ public interface MongoOperations extends FluentMongoOperations { * The collection name used for the specified class by this template. * * @param entityClass must not be {@literal null}. - * @return + * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type. */ String getCollectionName(Class entityClass); /** - * Execute the a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to + * Execute a MongoDB command expressed as a JSON string. Parsing is delegated to {@link Document#parse(String)} to * obtain the {@link Document} holding the actual command. Any errors that result from executing this command will be * converted into Spring's DAO exception hierarchy. * @@ -111,7 +123,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Execute a MongoDB query and iterate over the query results on a per-document basis with a DocumentCallbackHandler. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param collectionName name of the collection to retrieve the objects from. * @param dch the handler that will extract results, one document at a time. 
@@ -119,8 +131,7 @@ public interface MongoOperations extends FluentMongoOperations { void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch); /** - * Executes a {@link DbCallback} translating any exceptions as necessary. - *

+ * Executes a {@link DbCallback} translating any exceptions as necessary.
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -132,8 +143,7 @@ public interface MongoOperations extends FluentMongoOperations { T execute(DbCallback action); /** - * Executes the given {@link CollectionCallback} on the entity collection of the specified class. - *

+ * Executes the given {@link CollectionCallback} on the entity collection of the specified class.
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -145,8 +155,7 @@ public interface MongoOperations extends FluentMongoOperations { T execute(Class entityClass, CollectionCallback action); /** - * Executes the given {@link CollectionCallback} on the collection of the given name. - *

+ * Executes the given {@link CollectionCallback} on the collection of the given name.
* Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -170,8 +179,7 @@ public interface MongoOperations extends FluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} - * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *

+ * provided by the given {@link Supplier} to each and every command issued against MongoDB.
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use the * {@link SessionScoped#execute(SessionCallback, Consumer)} hook to potentially close the {@link ClientSession}. * @@ -180,21 +188,23 @@ public interface MongoOperations extends FluentMongoOperations { */ default SessionScoped withSession(Supplier sessionProvider) { - Assert.notNull(sessionProvider, "SessionProvider must not be null!"); + Assert.notNull(sessionProvider, "SessionProvider must not be null"); return new SessionScoped() { - private final Object lock = new Object(); - private @Nullable ClientSession session = null; + private final Lock lock = Lock.of(new ReentrantLock()); + private @Nullable ClientSession session; @Override - public T execute(SessionCallback action, Consumer onComplete) { + @SuppressWarnings("NullAway") + public @Nullable T execute(SessionCallback action, Consumer onComplete) { + + lock.executeWithoutResult(() -> { - synchronized (lock) { if (session == null) { session = sessionProvider.get(); } - } + }); try { return action.doInSession(MongoOperations.this.withSession(session)); @@ -206,8 +216,7 @@ public T execute(SessionCallback action, Consumer onComple } /** - * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}. - *

+ * Obtain a {@link ClientSession} bound instance of {@link MongoOperations}.
* Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * * @param session must not be {@literal null}. @@ -218,34 +227,36 @@ public T execute(SessionCallback action, Consumer onComple /** * Executes the given {@link Query} on the entity collection of the specified {@code entityType} backed by a Mongo DB - * {@link Cursor}. + * {@link com.mongodb.client.FindIterable}. *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityType must not be {@literal null}. * @param element return type - * @return will never be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 1.7 */ - CloseableIterator stream(Query query, Class entityType); + Stream stream(Query query, Class entityType); /** * Executes the given {@link Query} on the entity collection of the specified {@code entityType} and collection backed - * by a Mongo DB {@link Cursor}. + * by a Mongo DB {@link com.mongodb.client.FindIterable}. *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.FindIterable} that needs to be closed. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityType must not be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @param element return type - * @return will never be {@literal null}. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 1.10 */ - CloseableIterator stream(Query query, Class entityType, String collectionName); + Stream stream(Query query, Class entityType, String collectionName); /** * Create an uncapped collection with a name based on the provided entity class. @@ -281,6 +292,58 @@ public T execute(SessionCallback action, Consumer onComple */ MongoCollection createCollection(String collectionName, @Nullable CollectionOptions collectionOptions); + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline + * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content. + * @since 4.0 + */ + default MongoCollection createView(String name, Class source, AggregationOperation... stages) { + return createView(name, source, AggregationPipeline.of(stages)); + } + + /** + * Create a view with the provided name. 
The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @since 4.0 + */ + default MongoCollection createView(String name, Class source, AggregationPipeline pipeline) { + return createView(name, source, pipeline, null); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + MongoCollection createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given source. + * + * @param name the name of the view to create. + * @param source the name of the collection or view defining the to be created views source. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + MongoCollection createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + /** * A set of collection names. 
* @@ -289,18 +352,19 @@ public T execute(SessionCallback action, Consumer onComple Set getCollectionNames(); /** - * Get a collection by name, creating it if it doesn't exist. - *

+ * Get a {@link MongoCollection} by its name. The returned collection may not exists yet (except in local memory) and + * is created on first interaction with the server. Collections can be explicitly created via + * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) + * exists} first.
* Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. - * @return an existing collection or a newly created one. + * @return an existing collection or one created on first server interaction. */ MongoCollection getCollection(String collectionName); /** - * Check to see if a collection with a name indicated by the entity class exists. - *

+ * Check to see if a collection with a name indicated by the entity class exists.
* Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -309,8 +373,7 @@ public T execute(SessionCallback action, Consumer onComple boolean collectionExists(Class entityClass); /** - * Check to see if a collection with a given name exists. - *

+ * Check to see if a collection with a given name exists.
* Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -319,8 +382,7 @@ public T execute(SessionCallback action, Consumer onComple boolean collectionExists(String collectionName); /** - * Drop the collection with the name indicated by the entity class. - *

+ * Drop the collection with the name indicated by the entity class.
* Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -328,8 +390,7 @@ public T execute(SessionCallback action, Consumer onComple void dropCollection(Class entityClass); /** - * Drop the collection with the given name. - *

+ * Drop the collection with the given name.
* Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -351,11 +412,13 @@ public T execute(SessionCallback action, Consumer onComple IndexOperations indexOps(Class entityClass); /** - * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.DB} level. + * Returns the {@link ScriptOperations} that can be performed on {@link com.mongodb.client.MongoDatabase} level. * - * @return + * @return never {@literal null}. * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ + @Deprecated ScriptOperations scriptOps(); /** @@ -390,11 +453,9 @@ public T execute(SessionCallback action, Consumer onComple BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName); /** - * Query for a list of objects of type T from the collection used by the entity class. - *

+ * Query for a list of objects of type T from the collection used by the entity class.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -404,11 +465,9 @@ public T execute(SessionCallback action, Consumer onComple List findAll(Class entityClass); /** - * Query for a list of objects of type T from the specified collection. - *

+ * Query for a list of objects of type T from the specified collection.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -418,34 +477,6 @@ public T execute(SessionCallback action, Consumer onComple */ List findAll(Class entityClass, String collectionName); - /** - * Execute a group operation over the entire collection. The group operation entity class should match the 'shape' of - * the returned object that takes int account the initial document structure as well as any finalize functions. - * - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. - * @param entityClass The parametrized type of the returned list - * @return The results of the group operation - */ - GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass); - - /** - * Execute a group operation restricting the rows to those which match the provided Criteria. The group operation - * entity class should match the 'shape' of the returned object that takes int account the initial document structure - * as well as any finalize functions. - * - * @param criteria The criteria that restricts the row that are considered for grouping. If not specified all rows are - * considered. - * @param inputCollectionName the collection where the group operation will read from - * @param groupBy the conditions under which the group operation will be performed, e.g. keys, initial document, - * reduce function. - * @param entityClass The parametrized type of the returned list - * @return The results of the group operation - */ - GroupByResults group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy, - Class entityClass); - /** * Execute an aggregation operation. The raw results will be mapped to the given entity class. 
The name of the * inputCollection is derived from the inputType of the aggregation. @@ -498,11 +529,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. + * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class. The name of the inputCollection is derived from the inputType of - * the aggregation. + * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. The name of the inputCollection is derived from + * the inputType of the aggregation. *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. @@ -511,35 +542,37 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * {@literal null}. * @param collectionName The name of the input collection to use for the aggreation. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(TypedAggregation aggregation, String collectionName, Class outputType); + Stream aggregateStream(TypedAggregation aggregation, String collectionName, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. - *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class and are returned as stream. The name of the inputCollection is - * derived from the inputType of the aggregation. - *

+ * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *

+ * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class and are returned as stream. The name of the + * inputCollection is derived from the inputType of the aggregation. + *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * * @param aggregation The {@link TypedAggregation} specification holding the aggregation operations, must not be * {@literal null}. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(TypedAggregation aggregation, Class outputType); + Stream aggregateStream(TypedAggregation aggregation, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. - *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class. - *

+ * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *

+ * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -548,17 +581,18 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param inputType the inputType where the aggregation operation will read from, must not be {@literal null} or * empty. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(Aggregation aggregation, Class inputType, Class outputType); + Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType); /** - * Execute an aggregation operation backed by a Mongo DB {@link Cursor}. - *

- * Returns a {@link CloseableIterator} that wraps the a Mongo DB {@link Cursor} that needs to be closed. The raw - * results will be mapped to the given entity class. - *

+ * Execute an aggregation operation backed by a Mongo DB {@link com.mongodb.client.AggregateIterable}. + *

+ * Returns a {@link Stream} that wraps the Mongo DB {@link com.mongodb.client.AggregateIterable} that needs to be + * closed. The raw results will be mapped to the given entity class. + *

* Aggregation streaming can't be used with {@link AggregationOptions#isExplain() aggregation explain}. Enabling * explanation mode will throw an {@link IllegalArgumentException}. * @@ -567,10 +601,11 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param collectionName the collection where the aggregation operation will read from, must not be {@literal null} or * empty. * @param outputType The parametrized type of the returned list, must not be {@literal null}. - * @return The results of the aggregation operation. + * @return the result {@link Stream}, containing mapped objects, needing to be closed once fully processed (e.g. + * through a try-with-resources clause). * @since 2.0 */ - CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, Class outputType); + Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType); /** * Execute a map-reduce operation. The map-reduce operation will be formed with an output type of INLINE @@ -580,7 +615,9 @@ GroupByResults group(@Nullable Criteria criteria, String inputCollectionN * @param reduceFunction The JavaScript reduce function * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass); @@ -593,7 +630,9 @@ MapReduceResults mapReduce(String inputCollectionName, String mapFunction * @param mapReduceOptions Options that specify detailed map-reduce behavior. * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. 
*/ + @Deprecated MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass); @@ -607,7 +646,9 @@ MapReduceResults mapReduce(String inputCollectionName, String mapFunction * @param reduceFunction The JavaScript reduce function * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass); @@ -621,7 +662,9 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * @param mapReduceOptions Options that specify detailed map-reduce behavior * @param entityClass The parametrized type of the returned list. Must not be {@literal null}. * @return The results of the map reduce operation + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class entityClass); @@ -630,69 +673,91 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * information to determine the collection the query is ran against. Note, that MongoDB limits the number of results * by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a particular number of * results. + *

+ * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

+ * + *
+	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
+	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
+	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
+	 * 
* * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @return + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated GeoResults geoNear(NearQuery near, Class entityClass); /** * Returns {@link GeoResults} for all entities matching the given {@link NearQuery}. Note, that MongoDB limits the * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a * particular number of results. + *

+ * MongoDB 4.2 has removed the {@code geoNear} command. Since version 2.2 this method uses aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

+ * + *
+	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
+	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
+	 * AggregationResults<Document> results = aggregate(geoNear, Document.class);
+	 * 
* * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @param collectionName the collection to trigger the query against. If no collection name is given the entity class * will be inspected. Must not be {@literal null} nor empty. * @return + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated GeoResults geoNear(NearQuery near, Class entityClass, String collectionName); /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the - * specified type. - *

+ * specified type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @return the converted object. */ - @Nullable - T findOne(Query query, Class entityClass); + @Nullable T findOne(Query query, Class entityClass); /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified - * type. - *

+ * type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @param collectionName name of the collection to retrieve the objects from. * @return the converted object. */ - @Nullable - T findOne(Query query, Class entityClass, String collectionName); + @Nullable T findOne(Query query, Class entityClass, String collectionName); /** * Determine result of given {@link Query} contains at least one element.
* NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. */ @@ -701,7 +766,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. * @return {@literal true} if the query yields a result. */ @@ -710,7 +775,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. @@ -718,15 +783,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin boolean exists(Query query, @Nullable Class entityClass, String collectionName); /** - * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type. - *

+ * Map the results of an ad-hoc query on the collection for the entity class to a List of the specified type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. * @return the List of converted objects. @@ -734,15 +797,13 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin List find(Query query, Class entityClass); /** - * Map the results of an ad-hoc query on the specified collection to a List of the specified type. - *

+ * Map the results of an ad-hoc query on the specified collection to a List of the specified type.
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned list. Must not be {@literal null}. * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}. @@ -750,6 +811,57 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin */ List find(Query query, Class entityClass, String collectionName); + /** + * Query for a window of objects of type T from the specified collection.
+ * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
+ * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@literal null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Window scroll(Query query, Class entityType); + + /** + * Query for a window of objects of type T from the specified collection.
+ * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
+ * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

+ * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@literal null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned window. + * @param collectionName name of the collection to retrieve the objects from. + * @return the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Window scroll(Query query, Class entityType, String collectionName); + /** * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be * derived from the given target class as well. @@ -758,8 +870,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * @param entityClass the type the document shall be converted into. Must not be {@literal null}. * @return the document with the given id mapped onto the given target class. */ - @Nullable - T findById(Object id, Class entityClass); + @Nullable T findById(Object id, Class entityClass); /** * Returns the document with the given id from the given collection mapped onto the given target class. @@ -769,8 +880,7 @@ MapReduceResults mapReduce(Query query, String inputCollectionName, Strin * @param collectionName the collection to query for the document. * @return he converted object or {@literal null} if document does not exist. 
*/ - @Nullable - T findById(Object id, Class entityClass, String collectionName); + @Nullable T findById(Object id, Class entityClass, String collectionName); /** * Finds the distinct values for a specified {@literal field} across a single {@link MongoCollection} or view and @@ -832,138 +942,159 @@ default List findDistinct(Query query, String field, String collection, C } /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @return the converted object that was updated before it was updated or {@literal null}, if not found. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - @Nullable - T findAndModify(Query query, Update update, Class entityClass); + @Nullable T findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated before it was updated or {@literal null}, if not found. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - @Nullable - T findAndModify(Query query, Update update, Class entityClass, String collectionName); + @Nullable T findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. + * @param update the {@link UpdateDefinition} to apply on matching documents. * @param options the {@link FindAndModifyOptions} holding additional information. * @param entityClass the parametrized type. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as * it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - @Nullable - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + @Nullable T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndModifyOptions#isReturnNew()} this will either be the object as it was before the update or as * it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - @Nullable - T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + @Nullable T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName); /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
* The collection name is derived from the {@literal replacement} type.
* Options are defaulted to {@link FindAndReplaceOptions#empty()}.
* NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ - @Nullable - default T findAndReplace(Query query, T replacement) { + default @Nullable T findAndReplace(Query query, T replacement) { return findAndReplace(query, replacement, FindAndReplaceOptions.empty()); } /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
* Options are defaulted to {@link FindAndReplaceOptions#empty()}.
* NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. * @since 2.1 */ - @Nullable - default T findAndReplace(Query query, T replacement, String collectionName) { + default @Nullable T findAndReplace(Query query, T replacement, String collectionName) { return findAndReplace(query, replacement, FindAndReplaceOptions.empty(), collectionName); } /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ - @Nullable - default T findAndReplace(Query query, T replacement, FindAndReplaceOptions options) { + default @Nullable T findAndReplace(Query query, T replacement, FindAndReplaceOptions options) { return findAndReplace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); } /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of @@ -971,22 +1102,21 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o * as it is after the update. * @since 2.1 */ - @Nullable - default T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { + default @Nullable T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); return findAndReplace(query, replacement, options, (Class) ClassUtils.getUserClass(replacement), collectionName); } /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the parametrized type. Must not be {@literal null}. @@ -996,8 +1126,7 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o * as it is after the update. * @since 2.1 */ - @Nullable - default T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class entityType, + default @Nullable T findAndReplace(Query query, T replacement, FindAndReplaceOptions options, Class entityType, String collectionName) { return findAndReplace(query, replacement, options, entityType, collectionName, entityType); @@ -1005,13 +1134,13 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection @@ -1021,10 +1150,11 @@ default T findAndReplace(Query query, T replacement, FindAndReplaceOptions o * @return the converted object that was updated or {@literal null}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ - @Nullable - default T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + default @Nullable T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, Class resultType) { return findAndReplace(query, replacement, options, entityType, @@ -1033,13 +1163,13 @@ default T findAndReplace(Query query, S replacement, FindAndReplaceOption /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
* NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields. Must not be {@literal null}. @@ -1051,125 +1181,269 @@ default T findAndReplace(Query query, S replacement, FindAndReplaceOption * as it is after the update. * @since 2.1 */ - @Nullable - T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, + @Nullable T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, String collectionName, Class resultType); /** * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the - * database. - *

- * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

+ * database.
+ * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @return the converted object */ - @Nullable - T findAndRemove(Query query, Class entityClass); + @Nullable T findAndRemove(Query query, Class entityClass); /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

+ *
* The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned list. * @param collectionName name of the collection to retrieve the objects from. * @return the converted object. */ - @Nullable - T findAndRemove(Query query, Class entityClass, String collectionName); + @Nullable T findAndRemove(Query query, Class entityClass, String collectionName); /** * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see #exactCount(Query, Class) + * @see #estimatedCount(Class) */ long count(Query query, Class entityClass); /** * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} * must solely consist of document field references as we lack type information to map potential property references - * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. * @see #count(Query, Class, String) + * @see #exactCount(Query, String) + * @see #estimatedCount(String) */ long count(Query query, String collectionName); /** * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity - * class to map the given {@link Query}. + * class to map the given {@link Query}.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. + * @see #count(Query, Class, String) + * @see #estimatedCount(String) */ long count(Query query, @Nullable Class entityClass, String collectionName); /** - * Insert the object into the collection for the entity type of the object to save. - *

- * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

- * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a - * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. - *

- *

- * Insert is used to initially store the object into the database. To update an existing object use the save method. + * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, + * based on collection statistics.
+ * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside + * transactions. + * + * @param entityClass must not be {@literal null}. + * @return the estimated number of documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.1 + */ + default long estimatedCount(Class entityClass) { + + Assert.notNull(entityClass, "Entity class must not be null"); + return estimatedCount(getCollectionName(entityClass)); + } + + /** + * Estimate the number of documents in the given collection based on collection statistics.
+ * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside + * transactions. + * + * @param collectionName must not be {@literal null}. + * @return the estimated number of documents. + * @since 3.1 + */ + long estimatedCount(String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(Class)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.4 + */ + default long exactCount(Query query, Class entityClass) { + return exactCount(query, entityClass, getCollectionName(entityClass)); + } + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @since 3.4 + */ + default long exactCount(Query query, String collectionName) { + return exactCount(query, null, collectionName); + } + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}.
+ * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
+ * This method uses an + * {@link com.mongodb.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @since 3.4 + */ + long exactCount(Query query, @Nullable Class entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save.
+ * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}.
+ * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated + * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string. + * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that + * leverages Type Conversion API. See + * Spring's + * Type Conversion" for more details.
+ * Insert is used to initially store the object into the database. To update an existing object use the + * {@link #save(Object)} method. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ T insert(T objectToSave); /** - * Insert the object into the specified collection. - *

+ * Insert the object into the specified collection.
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

+ * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
* Insert is used to initially store the object into the database. To update an existing object use the save method. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. */ T insert(T objectToSave, String collectionName); /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

+ * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the batch of objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the inserted objects that. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Collection insert(Collection batchToSave, Class entityClass); /** * Insert a batch of objects into the specified collection in a single batch write to the database. + *

+ * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the list of objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -1180,210 +1454,305 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

+ * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

+ * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the list of objects to save. Must not be {@literal null}. * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. */ Collection insertAll(Collection objectsToSave); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

+ * object is not already present, that is an 'upsert'.
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

- * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. + * Spring's + * Type Conversion" for more details. + *

+ * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ T save(T objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

+ * is an 'upsert'.
* The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

- * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
+ * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

+ * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. + *

+ * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ T save(T objectToSave, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *

+ * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @see Update + * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 */ - UpdateResult upsert(Query query, Update update, Class entityClass); + UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document.
* NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific + * support.
+ * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, FindAndModifyOptions, Class, String)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult upsert(Query query, Update update, String collectionName); + UpdateResult upsert(Query query, UpdateDefinition update, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult upsert(Query query, Update update, Class entityClass, String collectionName); + UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates the first object that is found in the collection of the entity class that matches the query document with * the provided update document. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class that determines the collection to use. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @see Update + * @see AggregationUpdate + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 */ - UpdateResult updateFirst(Query query, Update update, Class entityClass); + UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass); /** * Updates the first object that is found in the specified collection that matches the query document criteria with * the provided updated document.
* NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateFirst(Query query, Update update, String collectionName); + UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName); /** * Updates the first object that is found in the specified collection that matches the query document criteria with - * the provided updated document.
- * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * the provided updated document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateFirst(Query query, Update update, Class entityClass, String collectionName); + UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see Update + * @see AggregationUpdate + * @since 3.0 */ - UpdateResult updateMulti(Query query, Update update, Class entityClass); + UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass); /** * Updates all objects that are found in the specified collection that matches the query document criteria with the * provided updated document.
* NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific + * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateMulti(Query query, Update update, String collectionName); + UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

+ * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be auto + * incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - UpdateResult updateMulti(Query query, Update update, Class entityClass, String collectionName); + UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Remove the given object from the collection by id. + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}.
+ * Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged() + * acknowledged} remove operation was successful or not. * * @param object must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ DeleteResult remove(Object object); /** - * Removes the given object from the given collection. + * Removes the given object from the given collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}.
+ * Use {@link DeleteResult#getDeletedCount()} for insight whether an {@link DeleteResult#wasAcknowledged() + * acknowledged} remove operation was successful or not. * * @param object must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ DeleteResult remove(Object object, String collectionName); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class that determines the collection to use. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query} or {@literal entityClass} is {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ DeleteResult remove(Query query, Class entityClass); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. 
* - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class of the pojo to be operated on. Can be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query}, {@literal entityClass} or {@literal collectionName} is * {@literal null}. @@ -1396,8 +1765,9 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * NOTE: Any additional support for field mapping is not available due to the lack of domain type * information. Use {@link #remove(Query, Class, String)} to get full type specific support. * - * @param query the query document that specifies the criteria used to remove a record. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. * @throws IllegalArgumentException when {@literal query} or {@literal collectionName} is {@literal null}. */ @@ -1409,7 +1779,8 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. * * @param query the query document that specifies the criteria used to find and remove documents. 
- * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link List} converted objects deleted by this operation. * @since 1.5 */ @@ -1421,27 +1792,101 @@ T findAndReplace(Query query, S replacement, FindAndReplaceOptions option * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. * @return the {@link List} converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. * @since 1.5 */ List findAllAndRemove(Query query, Class entityClass); /** - * Returns and removes all documents that match the provided query document criteria from the the collection used to - * store the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in - * the query. + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. * * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link List} converted objects deleted by this operation. 
* @since 1.5 */ List findAllAndRemove(Query query, Class entityClass, String collectionName); + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
+ * The collection name is derived from the {@literal replacement} type.
+ * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. 
+ * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default UpdateResult replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. 
The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @since 4.2 + */ + UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName); + /** * Returns the underlying {@link MongoConverter}. * - * @return + * @return never {@literal null}. */ MongoConverter getConverter(); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java new file mode 100644 index 0000000000..574c0c8931 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoServerApiFactoryBean.java @@ -0,0 +1,96 @@ +/* + * Copyright 2021-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import org.jspecify.annotations.Nullable; +import org.springframework.beans.factory.FactoryBean; +import org.springframework.util.ObjectUtils; + +import com.mongodb.ServerApi; +import com.mongodb.ServerApi.Builder; +import com.mongodb.ServerApiVersion; + +/** + * {@link FactoryBean} for creating {@link ServerApi} using the {@link ServerApi.Builder}. + * + * @author Christoph Strobl + * @since 3.3 + */ +public class MongoServerApiFactoryBean implements FactoryBean { + + private @Nullable String version; + private @Nullable Boolean deprecationErrors; + private @Nullable Boolean strict; + + /** + * @param version the version string either as the enum name or the server version value. + * @see ServerApiVersion + */ + public void setVersion(String version) { + this.version = version; + } + + /** + * @param deprecationErrors + * @see ServerApi.Builder#deprecationErrors(boolean) + */ + public void setDeprecationErrors(@Nullable Boolean deprecationErrors) { + this.deprecationErrors = deprecationErrors; + } + + /** + * @param strict + * @see ServerApi.Builder#strict(boolean) + */ + public void setStrict(@Nullable Boolean strict) { + this.strict = strict; + } + + @Override + public @Nullable ServerApi getObject() throws Exception { + + Builder builder = ServerApi.builder().version(version()); + + if (deprecationErrors != null) { + builder = builder.deprecationErrors(deprecationErrors); + } + if (strict != null) { + builder = builder.strict(strict); + } + return builder.build(); + } + + @Nullable + @Override + public Class getObjectType() { + return ServerApi.class; + } + + private ServerApiVersion version() { + + if(version == null) { + return ServerApiVersion.V1; + } + + try { + // lookup by name eg. 'V1' + return ObjectUtils.caseInsensitiveValueOf(ServerApiVersion.values(), version); + } catch (IllegalArgumentException e) { + // or just the version number, eg. 
just '1' + return ServerApiVersion.findByValue(version); + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java index 311b1712ef..ab03b41424 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/MongoTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2010-2018 the original author or authors. + * Copyright 2010-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,22 +17,21 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; -import lombok.AccessLevel; -import lombok.AllArgsConstructor; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; - import java.io.IOException; +import java.math.BigDecimal; +import java.math.RoundingMode; import java.util.*; import java.util.concurrent.TimeUnit; +import java.util.function.BiPredicate; import java.util.stream.Collectors; +import java.util.stream.Stream; -import org.bson.BsonValue; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.bson.Document; -import org.bson.codecs.Codec; import org.bson.conversions.Bson; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.jspecify.annotations.Nullable; + import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import 
org.springframework.context.ApplicationContextAware; @@ -47,72 +46,83 @@ import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.GeoResults; import org.springframework.data.geo.Metric; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.MappingException; +import org.springframework.data.mapping.callback.EntityCallbacks; import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.MongoDatabaseUtils; -import org.springframework.data.mongodb.MongoDbFactory; import org.springframework.data.mongodb.SessionSynchronization; import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.CollectionPreparerDelegate; import org.springframework.data.mongodb.core.DefaultBulkOperations.BulkOperationContext; import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; +import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.QueryOperations.CountContext; +import org.springframework.data.mongodb.core.QueryOperations.DeleteContext; +import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; +import org.springframework.data.mongodb.core.QueryOperations.QueryContext; +import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import 
org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.AggregationResults; -import org.springframework.data.mongodb.core.aggregation.Fields; -import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; -import org.springframework.data.mongodb.core.convert.*; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.DefaultDbRefResolver; +import org.springframework.data.mongodb.core.convert.JsonSchemaMapper; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoJsonSchemaMapper; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.index.DefaultSearchIndexOperations; import org.springframework.data.mongodb.core.index.IndexOperations; import org.springframework.data.mongodb.core.index.IndexOperationsProvider; import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; import org.springframework.data.mongodb.core.index.MongoPersistentEntityIndexCreator; +import 
org.springframework.data.mongodb.core.index.SearchIndexOperations; +import org.springframework.data.mongodb.core.index.SearchIndexOperationsProvider; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; -import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; -import org.springframework.data.mongodb.core.mapreduce.GroupBy; -import org.springframework.data.mongodb.core.mapreduce.GroupByResults; +import org.springframework.data.mongodb.core.mapping.event.*; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; import org.springframework.data.mongodb.core.mapreduce.MapReduceResults; +import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Meta; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.core.timeseries.Granularity; 
import org.springframework.data.mongodb.core.validation.Validator; -import org.springframework.data.projection.ProjectionInformation; -import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.CloseableIterator; +import org.springframework.data.util.Lazy; import org.springframework.data.util.Optionals; -import org.springframework.jca.cci.core.ConnectionCallback; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; import com.mongodb.ClientSessionOptions; -import com.mongodb.Cursor; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.Mongo; -import com.mongodb.MongoClient; import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; @@ -121,6 +131,7 @@ import com.mongodb.client.DistinctIterable; import com.mongodb.client.FindIterable; import com.mongodb.client.MapReduceIterable; +import com.mongodb.client.MongoClient; import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoCursor; import com.mongodb.client.MongoDatabase; @@ -130,7 +141,23 @@ import com.mongodb.client.result.UpdateResult; /** - * Primary implementation of {@link MongoOperations}. + * Primary implementation of {@link MongoOperations}. It simplifies the use of imperative MongoDB usage and helps to + * avoid common errors. It executes core MongoDB workflow, leaving application code to provide {@link Document} and + * extract results. 
This class executes BSON queries or updates, initiating iteration over {@link FindIterable} and + * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the + * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a + * {@link MongoDatabaseFactory} reference, or get prepared in an application context and given to services as bean + * reference. + *

+ * Note: The {@link MongoDatabaseFactory} should always be configured as a bean in the application context, in the first + * case given to the service directly, in the second case to the prepared template. + *

{@link ReadPreference} and {@link com.mongodb.ReadConcern}

+ *

+ * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and + * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}. + *

+ * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. * * @author Thomas Risberg * @author Graeme Rocher @@ -151,53 +178,46 @@ * @author Borislav Rangelov * @author duozhilin * @author Andreas Zink + * @author Cimon Lucas + * @author Michael J. Simons + * @author Roman Puchkovskiy + * @author Yadhukrishna S Pai + * @author Anton Barkan + * @author Bartłomiej Mazur + * @author Michael Krog + * @author Jakub Zurawa + * @author Florian Lüdiger */ -@SuppressWarnings("deprecation") -public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider { +public class MongoTemplate implements MongoOperations, ApplicationContextAware, IndexOperationsProvider, + SearchIndexOperationsProvider, ReadPreferenceAware { - private static final Logger LOGGER = LoggerFactory.getLogger(MongoTemplate.class); + private static final Log LOGGER = LogFactory.getLog(MongoTemplate.class); private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; - private static final Collection ITERABLE_CLASSES; - - static { - - Set iterableClasses = new HashSet<>(); - iterableClasses.add(List.class.getName()); - iterableClasses.add(Collection.class.getName()); - iterableClasses.add(Iterator.class.getName()); - - ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); - } private final MongoConverter mongoConverter; private final MappingContext, MongoPersistentProperty> mappingContext; - private final MongoDbFactory mongoDbFactory; + private final MongoDatabaseFactory mongoDbFactory; private final PersistenceExceptionTranslator exceptionTranslator; private final QueryMapper queryMapper; private final UpdateMapper updateMapper; private final JsonSchemaMapper schemaMapper; - private final SpelAwareProxyProjectionFactory projectionFactory; private final EntityOperations operations; + private final 
PropertyOperations propertyOperations; + private final QueryOperations queryOperations; + private final EntityLifecycleEventDelegate eventDelegate; private @Nullable WriteConcern writeConcern; private WriteConcernResolver writeConcernResolver = DefaultWriteConcernResolver.INSTANCE; private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; private @Nullable ReadPreference readPreference; private @Nullable ApplicationEventPublisher eventPublisher; + private @Nullable EntityCallbacks entityCallbacks; private @Nullable ResourceLoader resourceLoader; private @Nullable MongoPersistentEntityIndexCreator indexCreator; private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; - /** - * Constructor used for a basic template configuration. - * - * @param mongoClient must not be {@literal null}. - * @param databaseName must not be {@literal null} or empty. - */ - public MongoTemplate(MongoClient mongoClient, String databaseName) { - this(new SimpleMongoDbFactory(mongoClient, databaseName), (MongoConverter) null); - } + private CountExecution countExecution = this::doExactCount; /** * Constructor used for a basic template configuration. @@ -206,8 +226,8 @@ public MongoTemplate(MongoClient mongoClient, String databaseName) { * @param databaseName must not be {@literal null} or empty. * @since 2.1 */ - public MongoTemplate(com.mongodb.client.MongoClient mongoClient, String databaseName) { - this(new SimpleMongoClientDbFactory(mongoClient, databaseName), (MongoConverter) null); + public MongoTemplate(MongoClient mongoClient, String databaseName) { + this(new SimpleMongoClientDatabaseFactory(mongoClient, databaseName), (MongoConverter) null); } /** @@ -215,7 +235,7 @@ public MongoTemplate(com.mongodb.client.MongoClient mongoClient, String database * * @param mongoDbFactory must not be {@literal null}. 
*/ - public MongoTemplate(MongoDbFactory mongoDbFactory) { + public MongoTemplate(MongoDatabaseFactory mongoDbFactory) { this(mongoDbFactory, (MongoConverter) null); } @@ -225,9 +245,9 @@ public MongoTemplate(MongoDbFactory mongoDbFactory) { * @param mongoDbFactory must not be {@literal null}. * @param mongoConverter */ - public MongoTemplate(MongoDbFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) { + public MongoTemplate(MongoDatabaseFactory mongoDbFactory, @Nullable MongoConverter mongoConverter) { - Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null!"); + Assert.notNull(mongoDbFactory, "MongoDbFactory must not be null"); this.mongoDbFactory = mongoDbFactory; this.exceptionTranslator = mongoDbFactory.getExceptionTranslator(); @@ -235,34 +255,48 @@ public MongoTemplate(MongoDbFactory mongoDbFactory, @Nullable MongoConverter mon this.queryMapper = new QueryMapper(this.mongoConverter); this.updateMapper = new UpdateMapper(this.mongoConverter); this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter); - this.projectionFactory = new SpelAwareProxyProjectionFactory(); - this.operations = new EntityOperations(this.mongoConverter.getMappingContext()); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); + this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); + this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, + mongoDbFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); // We always have a mapping context in the converter, whether it's a simple one or not mappingContext = this.mongoConverter.getMappingContext(); // We create indexes based on mapping events - if (mappingContext instanceof MongoMappingContext) { - indexCreator = new MongoPersistentEntityIndexCreator((MongoMappingContext) mappingContext, this); - eventPublisher = new MongoMappingEventPublisher(indexCreator); - if (mappingContext 
instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (mappingContext instanceof MongoMappingContext mappingContext) { + + if (mappingContext.isAutoIndexCreation()) { + + indexCreator = new MongoPersistentEntityIndexCreator(mappingContext, this); + eventPublisher = new MongoMappingEventPublisher(indexCreator); + mappingContext.setApplicationEventPublisher(eventPublisher); } } } - private MongoTemplate(MongoDbFactory dbFactory, MongoTemplate that) { + private MongoTemplate(MongoDatabaseFactory dbFactory, MongoTemplate that) { this.mongoDbFactory = dbFactory; this.exceptionTranslator = that.exceptionTranslator; this.sessionSynchronization = that.sessionSynchronization; - this.mongoConverter = that.mongoConverter instanceof MappingMongoConverter ? getDefaultMongoConverter(dbFactory) - : that.mongoConverter; + + // we need to (re)create the MappingMongoConverter as we need to have it use a DbRefResolver that operates within + // the sames session. Otherwise loading referenced objects would happen outside of it. + if (that.mongoConverter instanceof MappingMongoConverter mappingMongoConverter) { + this.mongoConverter = mappingMongoConverter.with(dbFactory); + } else { + this.mongoConverter = that.mongoConverter; + } + this.queryMapper = that.queryMapper; this.updateMapper = that.updateMapper; this.schemaMapper = that.schemaMapper; - this.projectionFactory = that.projectionFactory; this.mappingContext = that.mappingContext; this.operations = that.operations; + this.propertyOperations = that.propertyOperations; + this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; } /** @@ -277,8 +311,7 @@ public void setWriteResultChecking(@Nullable WriteResultChecking resultChecking) /** * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} - * configured on the {@link MongoDbFactory} will apply. 
If you configured a {@link Mongo} instance no - * {@link WriteConcern} will be used. + * configured on the {@link MongoDatabaseFactory} will apply. * * @param writeConcern */ @@ -306,24 +339,102 @@ public void setReadPreference(@Nullable ReadPreference readPreference) { this.readPreference = readPreference; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + @Override + public boolean hasReadPreference() { + return this.readPreference != null; + } + + @Override + public @Nullable ReadPreference getReadPreference() { + return this.readPreference; + } + + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. + * @since 4.0 + * @see MongoMappingEvent */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { prepareIndexCreator(applicationContext); eventPublisher = applicationContext; + eventDelegate.setPublisher(eventPublisher); + + if (entityCallbacks == null) { + setEntityCallbacks(EntityCallbacks.create(applicationContext)); + } - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); } resourceLoader = applicationContext; + } - projectionFactory.setBeanFactory(applicationContext); - projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); + 
/** + * Set the {@link EntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the {@link BeforeSaveCallback}. + *
+ * Overrides potentially existing {@link EntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 2.2 + */ + public void setEntityCallbacks(EntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; + } + + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiPredicate estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiPredicate filter}. 
+ * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiPredicate estimationFilter) { + + if (enabled) { + + this.countExecution = (collectionPreparer, collectionName, filter, options) -> { + + if (!estimationFilter.test(filter, options)) { + return doExactCount(collectionPreparer, collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } + + return doEstimatedCount(collectionPreparer, collectionName, estimatedDocumentCountOptions); + }; + } else { + this.countExecution = this::doExactCount; + } } /** @@ -345,8 +456,8 @@ private void prepareIndexCreator(ApplicationContext context) { } } - if (context instanceof ConfigurableApplicationContext && indexCreator != null) { - ((ConfigurableApplicationContext) context).addApplicationListener(indexCreator); + if (context instanceof ConfigurableApplicationContext configurableApplicationContext && indexCreator != null) { + configurableApplicationContext.addApplicationListener(indexCreator); } } @@ -355,54 +466,53 @@ private void prepareIndexCreator(ApplicationContext context) { * * @return */ + @Override public MongoConverter getConverter() { return this.mongoConverter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeAsStream(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override - public CloseableIterator stream(final Query query, final Class entityType) { - - return stream(query, entityType, operations.determineCollectionName(entityType)); + public Stream stream(Query query, Class entityType) { + return stream(query, entityType, getCollectionName(entityType)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.MongoOperations#stream(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override - public CloseableIterator stream(final Query query, final Class entityType, final String collectionName) { + public Stream stream(Query query, Class entityType, String collectionName) { return doStream(query, entityType, collectionName, entityType); } - protected CloseableIterator doStream(final Query query, final Class entityType, final String collectionName, - Class returnType) { + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + protected Stream doStream(Query query, Class entityType, String collectionName, Class returnType) { + return doStream(query, entityType, collectionName, returnType, QueryResultConverter.entity()); + } - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityType, "Entity type must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(returnType, "ReturnType must not be null!"); + @SuppressWarnings({"ConstantConditions", "NullAway"}) + Stream doStream(Query query, Class entityType, String collectionName, Class returnType, + QueryResultConverter resultConverter) { - return execute(collectionName, new CollectionCallback>() { + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityType, "Entity type must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(returnType, "ReturnType must not be null"); - @Override - public CloseableIterator doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { + return execute(collectionName, (CollectionCallback>) collection -> { - MongoPersistentEntity persistentEntity = mappingContext.getRequiredPersistentEntity(entityType); + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); - Document mappedFields = 
getMappedFieldsObject(query.getFieldsObject(), persistentEntity, returnType); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), persistentEntity); + QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(returnType, entityType); - FindIterable cursor = new QueryCursorPreparer(query, entityType) - .prepare(collection.find(mappedQuery, Document.class).projection(mappedFields)); + Document mappedQuery = queryContext.getMappedQuery(persistentEntity); + Document mappedFields = queryContext.getMappedFields(persistentEntity, projection); - return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, - new ProjectingReadCallback<>(mongoConverter, entityType, returnType, collectionName)); - } + CollectionPreparerDelegate readPreference = createDelegate(query); + FindIterable cursor = new QueryCursorPreparer(query, entityType).initiateFind(collection, + col -> readPreference.prepare(col).find(mappedQuery, Document.class).projection(mappedFields)); + + DocumentCallback resultReader = getResultReader(projection, collectionName, resultConverter); + + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, + resultReader).stream(); }); } @@ -411,52 +521,35 @@ public String getCollectionName(Class entityClass) { return this.operations.determineCollectionName(entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(java.lang.String) - */ @Override - public Document executeCommand(final String jsonCommand) { + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + public Document executeCommand(String jsonCommand) { - Assert.hasText(jsonCommand, "JsonCommand must not be null nor empty!"); + Assert.hasText(jsonCommand, "JsonCommand must not be null nor empty"); - return execute(new DbCallback() { - public Document doInDB(MongoDatabase db) throws MongoException, DataAccessException { - 
return db.runCommand(Document.parse(jsonCommand), Document.class); - } - }); + return execute(db -> db.runCommand(Document.parse(jsonCommand), Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(org.bson.Document) - */ @Override - public Document executeCommand(final Document command) { + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + public Document executeCommand(Document command) { - Assert.notNull(command, "Command must not be null!"); + Assert.notNull(command, "Command must not be null"); return execute(db -> db.runCommand(command, Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) - */ @Override + @SuppressWarnings({ "ConstantConditions", "NullAway" }) public Document executeCommand(Document command, @Nullable ReadPreference readPreference) { - Assert.notNull(command, "Command must not be null!"); + Assert.notNull(command, "Command must not be null"); return execute(db -> readPreference != null // ? db.runCommand(command, readPreference, Document.class) // : db.runCommand(command, Document.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#executeQuery(org.springframework.data.mongodb.core.query.Query, java.lang.String, org.springframework.data.mongodb.core.DocumentCallbackHandler) - */ @Override public void executeQuery(Query query, String collectionName, DocumentCallbackHandler dch) { executeQuery(query, collectionName, dch, new QueryCursorPreparer(query, null)); @@ -470,36 +563,33 @@ public void executeQuery(Query query, String collectionName, DocumentCallbackHan * specification, must not be {@literal null}. 
* @param collectionName name of the collection to retrieve the objects from * @param documentCallbackHandler the handler that will extract results, one document at a time - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). + * @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set, + * (apply limits, skips and so on). */ protected void executeQuery(Query query, String collectionName, DocumentCallbackHandler documentCallbackHandler, @Nullable CursorPreparer preparer) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(documentCallbackHandler, "DocumentCallbackHandler must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(documentCallbackHandler, "DocumentCallbackHandler must not be null"); Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), Optional.empty()); Document sortObject = query.getSortObject(); Document fieldsObject = query.getFieldsObject(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing query: {} sort: {} fields: {} in collection: {}", serializeToJsonSafely(queryObject), - sortObject, fieldsObject, collectionName); + LOGGER.debug(String.format("Executing query: %s fields: %s sort: %s in collection: %s", + serializeToJsonSafely(queryObject), fieldsObject, serializeToJsonSafely(sortObject), collectionName)); } - this.executeQueryInternal(new FindCallback(queryObject, fieldsObject), preparer, documentCallbackHandler, - collectionName); + this.executeQueryInternal(new FindCallback(createDelegate(query), queryObject, fieldsObject, null), + preparer != null ? 
preparer : CursorPreparer.NO_OP_PREPARER, documentCallbackHandler, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(org.springframework.data.mongodb.core.DbCallback) - */ - public T execute(DbCallback action) { + @Override + public @Nullable T execute(DbCallback action) { - Assert.notNull(action, "DbCallback must not be null!"); + Assert.notNull(action, "DbCallback must not be null"); try { MongoDatabase db = prepareDatabase(this.doGetDatabase()); @@ -509,24 +599,18 @@ public T execute(DbCallback action) { } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.DbCallback) - */ - public T execute(Class entityClass, CollectionCallback callback) { + @Override + public @Nullable T execute(Class entityClass, CollectionCallback callback) { - Assert.notNull(entityClass, "EntityClass must not be null!"); - return execute(operations.determineCollectionName(entityClass), callback); + Assert.notNull(entityClass, "EntityClass must not be null"); + return execute(getCollectionName(entityClass), callback); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.DbCallback) - */ - public T execute(String collectionName, CollectionCallback callback) { + @Override + public @Nullable T execute(String collectionName, CollectionCallback callback) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(callback, "CollectionCallback must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(callback, "CollectionCallback must not be null"); try { MongoCollection collection = getAndPrepareCollection(doGetDatabase(), collectionName); @@ -536,26 +620,19 @@ public T execute(String collectionName, CollectionCallback callback) { } } - /* - * (non-Javadoc) - * 
@see org.springframework.data.mongodb.core.MongoOperations#withSession(com.mongodb.ClientSessionOptions) - */ @Override public SessionScoped withSession(ClientSessionOptions options) { - Assert.notNull(options, "ClientSessionOptions must not be null!"); + Assert.notNull(options, "ClientSessionOptions must not be null"); return withSession(() -> mongoDbFactory.getSession(options)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#withSession(com.mongodb.session.ClientSession) - */ @Override + @Contract("_ -> new") public MongoTemplate withSession(ClientSession session) { - Assert.notNull(session, "ClientSession must not be null!"); + Assert.notNull(session, "ClientSession must not be null"); return new SessionBoundMongoTemplate(session, MongoTemplate.this); } @@ -571,174 +648,183 @@ public void setSessionSynchronization(SessionSynchronization sessionSynchronizat this.sessionSynchronization = sessionSynchronization; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class) - */ + @Override public MongoCollection createCollection(Class entityClass) { - return createCollection(operations.determineCollectionName(entityClass)); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public MongoCollection createCollection(Class entityClass, @Nullable CollectionOptions collectionOptions) { - Assert.notNull(entityClass, "EntityClass must not be null!"); - return doCreateCollection(operations.determineCollectionName(entityClass), - convertToDocument(collectionOptions, entityClass)); + Assert.notNull(entityClass, "EntityClass must not be null"); + + return doCreateCollection(getCollectionName(entityClass), + 
operations.convertToCreateCollectionOptions(collectionOptions, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String) - */ - public MongoCollection createCollection(final String collectionName) { + @Override + public MongoCollection createCollection(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); return doCreateCollection(collectionName, new Document()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) - */ - public MongoCollection createCollection(final String collectionName, - final @Nullable CollectionOptions collectionOptions) { + @Override + public MongoCollection createCollection(String collectionName, + @Nullable CollectionOptions collectionOptions) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - return doCreateCollection(collectionName, convertToDocument(collectionOptions)); + Assert.notNull(collectionName, "CollectionName must not be null"); + return doCreateCollection(collectionName, + operations.convertToCreateCollectionOptions(collectionOptions, Object.class)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#getCollection(java.lang.String) - */ - public MongoCollection getCollection(final String collectionName) { + @Override + public MongoCollection createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, getCollectionName(source), + queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); + } - Assert.notNull(collectionName, "CollectionName must not be null!"); + @Override + public MongoCollection createView(String name, String 
source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { - return execute(new DbCallback>() { - public MongoCollection doInDB(MongoDatabase db) throws MongoException, DataAccessException { - return db.getCollection(collectionName, Document.class); - } + return createView(name, source, + queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private MongoCollection createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + @SuppressWarnings("NullAway") + protected MongoCollection doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + db.createView(name, source, pipeline, viewOptions); + return db.getCollection(name); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.Class) - */ + @Override + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + @Contract("null -> fail") + public MongoCollection getCollection(@Nullable String collectionName) { + + Assert.notNull(collectionName, "CollectionName must not be null"); + + return execute(db -> db.getCollection(collectionName, Document.class)); + } + + @Override public boolean collectionExists(Class entityClass) { - return collectionExists(operations.determineCollectionName(entityClass)); + return collectionExists(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollection(java.lang.String) - */ - public boolean collectionExists(final String collectionName) { + @Override + @SuppressWarnings({ 
"ConstantConditions", "NullAway" }) + public boolean collectionExists(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); - return execute(new DbCallback() { - public Boolean doInDB(MongoDatabase db) throws MongoException, DataAccessException { + return execute(db -> { - for (String name : db.listCollectionNames()) { - if (name.equals(collectionName)) { - return true; - } + for (String name : db.listCollectionNames()) { + if (name.equals(collectionName)) { + return true; } - return false; } + return false; }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.Class) - */ + @Override public void dropCollection(Class entityClass) { - dropCollection(operations.determineCollectionName(entityClass)); + dropCollection(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#dropCollection(java.lang.String) - */ + @Override public void dropCollection(String collectionName) { - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); - execute(collectionName, new CollectionCallback() { - public Void doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - collection.drop(); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Dropped collection [{}]", - collection.getNamespace() != null ? collection.getNamespace().getCollectionName() : collectionName); - } - return null; + execute(collectionName, (CollectionCallback) collection -> { + collection.drop(); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Dropped collection [%s]", + collection.getNamespace() != null ? 
collection.getNamespace().getCollectionName() : collectionName)); } + return null; }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.String) - */ + @Override public IndexOperations indexOps(String collectionName) { - return new DefaultIndexOperations(this, collectionName, null); + return indexOps(collectionName, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#indexOps(java.lang.Class) - */ + @Override + public IndexOperations indexOps(String collectionName, @Nullable Class type) { + return new DefaultIndexOperations(this, collectionName, type); + } + + @Override public IndexOperations indexOps(Class entityClass) { - return new DefaultIndexOperations(this, operations.determineCollectionName(entityClass), entityClass); + return indexOps(getCollectionName(entityClass), entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.String) - */ - public BulkOperations bulkOps(BulkMode bulkMode, String collectionName) { - return bulkOps(bulkMode, null, collectionName); + @Override + public SearchIndexOperations searchIndexOps(String collectionName) { + return searchIndexOps(null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class) - */ + @Override + public SearchIndexOperations searchIndexOps(Class type) { + return new DefaultSearchIndexOperations(this, type); + } + + @Override + public SearchIndexOperations searchIndexOps(@Nullable Class type, String collectionName) { + return new DefaultSearchIndexOperations(this, collectionName, type); + } + + @Override + public BulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); + } + + @Override public 
BulkOperations bulkOps(BulkMode bulkMode, Class entityClass) { - return bulkOps(bulkMode, entityClass, operations.determineCollectionName(entityClass)); + return bulkOps(bulkMode, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#bulkOps(org.springframework.data.mongodb.core.BulkMode, java.lang.Class, java.lang.String) - */ + @Override public BulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { - Assert.notNull(mode, "BulkMode must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - DefaultBulkOperations operations = new DefaultBulkOperations(this, collectionName, new BulkOperationContext(mode, - Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper)); + DefaultBulkOperations operations = new DefaultBulkOperations(this, collectionName, + new BulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, updateMapper, + eventPublisher, entityCallbacks)); - operations.setExceptionTranslator(exceptionTranslator); operations.setDefaultWriteConcern(writeConcern); return operations; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#scriptOps() - */ @Override public ScriptOperations scriptOps() { return new DefaultScriptOperations(this); @@ -749,19 +835,21 @@ public ScriptOperations scriptOps() { @Nullable @Override public T findOne(Query query, Class entityClass) { - return findOne(query, entityClass, operations.determineCollectionName(entityClass)); + return findOne(query, entityClass, getCollectionName(entityClass)); } @Nullable @Override public T findOne(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - 
Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + if (ObjectUtils.isEmpty(query.getSortObject())) { - if (ObjectUtils.isEmpty(query.getSortObject()) && !query.getCollation().isPresent()) { - return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass); + return doFindOne(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + new QueryCursorPreparer(query, entityClass), entityClass); } else { query.limit(1); List results = find(query, entityClass, collectionName); @@ -771,7 +859,7 @@ public T findOne(Query query, Class entityClass, String collectionName) { @Override public boolean exists(Query query, Class entityClass) { - return exists(query, entityClass, operations.determineCollectionName(entityClass)); + return exists(query, entityClass, getCollectionName(entityClass)); } @Override @@ -780,102 +868,139 @@ public boolean exists(Query query, String collectionName) { } @Override + @SuppressWarnings({ "ConstantConditions", "NullAway" }) public boolean exists(Query query, @Nullable Class entityClass, String collectionName) { if (query == null) { throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); } - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass)); + QueryContext queryContext = queryOperations.createQueryContext(query); + Document mappedQuery = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); return execute(collectionName, - new ExistsCallback(mappedQuery, 
query.getCollation().map(Collation::toMongoCollation).orElse(null))); + new ExistsCallback(createDelegate(query), mappedQuery, queryContext.getCollation(entityClass).orElse(null))); } // Find methods that take a Query to express the query and that return a List of objects. - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public List find(Query query, Class entityClass) { - return find(query, entityClass, operations.determineCollectionName(entityClass)); + return find(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public List find(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + return doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), entityClass, new QueryCursorPreparer(query, entityClass)); } + @Override + public Window scroll(Query query, Class entityType) { + + Assert.notNull(entityType, "Entity type must not be null"); + + return scroll(query, entityType, getCollectionName(entityType)); + } + + @Override + public Window scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, QueryResultConverter.entity(), collectionName); + } + + Window doScroll(Query query, Class 
sourceClass, Class targetClass, + QueryResultConverter resultConverter, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); + + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); + int limit = query.isLimited() ? query.getLimit() + 1 : Integer.MAX_VALUE; + + if (query.hasKeyset()) { + + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); + + List result = doFind(collectionName, createDelegate(query), keysetPaginationQuery.query(), + keysetPaginationQuery.fields(), sourceClass, + new QueryCursorPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback); + + return ScrollUtils.createWindow(query, result, sourceClass, operations); + } + + List result = doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), + sourceClass, new QueryCursorPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), + callback); + + return ScrollUtils.createWindow(result, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip())); + } + @Nullable @Override public T findById(Object id, Class entityClass) { - return findById(id, entityClass, operations.determineCollectionName(entityClass)); + return findById(id, entityClass, getCollectionName(entityClass)); } @Nullable @Override public T findById(Object id, Class entityClass, String collectionName) { - Assert.notNull(id, "Id must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(id, "Id must not be null"); + 
Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); String idKey = operations.getIdPropertyName(entityClass); - return doFindOne(collectionName, new Document(idKey, id), new Document(), entityClass); + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), new Document(), + entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.Class, java.lang.Class) - */ @Override public List findDistinct(Query query, String field, Class entityClass, Class resultClass) { - return findDistinct(query, field, operations.determineCollectionName(entityClass), entityClass, resultClass); + return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.String, java.lang.Class, java.lang.Class) - */ @Override - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "NullAway" }) public List findDistinct(Query query, String field, String collectionName, Class entityClass, Class resultClass) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(field, "Field must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(resultClass, "ResultClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(resultClass, "ResultClass must not be null"); MongoPersistentEntity entity = entityClass != 
Object.class ? getPersistentEntity(entityClass) : null; + DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity); - String mappedFieldName = queryMapper.getMappedFields(new Document(field, 1), entity).keySet().iterator().next(); - - Class mongoDriverCompatibleType = getMongoDbFactory().getCodecFor(resultClass) // - .map(Codec::getEncoderClass) // - .orElse((Class) BsonValue.class); + Document mappedQuery = distinctQueryContext.getMappedQuery(entity); + String mappedFieldName = distinctQueryContext.getMappedFieldName(entity); + Class mongoDriverCompatibleType = distinctQueryContext.getDriverCompatibleClass(resultClass); MongoIterable result = execute(collectionName, (collection) -> { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); + } + + collection = createDelegate(query).prepare(collection); + DistinctIterable iterable = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType); + distinctQueryContext.applyCollation(entityClass, iterable::collation); - return query.getCollation().map(Collation::toMongoCollation).map(iterable::collation).orElse(iterable); + return iterable; }); if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) { @@ -884,7 +1009,7 @@ public List findDistinct(Query query, String field, String collectionName DefaultDbRefResolver dbRefResolver = new DefaultDbRefResolver(mongoDbFactory); result = result.map((source) -> converter.mapValueToTargetType(source, - getMostSpecificConversionTargetType(resultClass, entityClass, field), dbRefResolver)); + distinctQueryContext.getMostSpecificConversionTargetType(resultClass, entityClass), dbRefResolver)); } try { @@ -894,35 +1019,9 @@ public List findDistinct(Query query, String field, String 
collectionName } } - /** - * @param userType must not be {@literal null}. - * @param domainType must not be {@literal null}. - * @param field must not be {@literal null}. - * @return the most specific conversion target type depending on user preference and domain type property. - * @since 2.1 - */ - private static Class getMostSpecificConversionTargetType(Class userType, Class domainType, String field) { - - Class conversionTargetType = userType; - try { - - Class propertyType = PropertyPath.from(field, domainType).getLeafProperty().getLeafType(); - - // use the more specific type but favor UserType over property one - if (ClassUtils.isAssignable(userType, propertyType)) { - conversionTargetType = propertyType; - } - - } catch (PropertyReferenceException e) { - // just don't care about it as we default to Object.class anyway. - } - - return conversionTargetType; - } - @Override public GeoResults geoNear(NearQuery near, Class entityClass) { - return geoNear(near, entityClass, operations.determineCollectionName(entityClass)); + return geoNear(near, entityClass, getCollectionName(entityClass)); } @Override @@ -930,175 +1029,183 @@ public GeoResults geoNear(NearQuery near, Class domainType, String col return geoNear(near, domainType, collectionName, domainType); } - @SuppressWarnings("unchecked") public GeoResults geoNear(NearQuery near, Class domainType, String collectionName, Class returnType) { + return doGeoNear(near, domainType, collectionName, returnType, QueryResultConverter.entity()); + } + + GeoResults doGeoNear(NearQuery near, Class domainType, String collectionName, Class returnType, + QueryResultConverter resultConverter) { if (near == null) { - throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); } if (domainType == null) { - throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); + throw new 
InvalidDataAccessApiUsageException("Entity class must not be null"); } - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(returnType, "ReturnType must not be null"); - String collection = StringUtils.hasText(collectionName) ? collectionName - : operations.determineCollectionName(domainType); - Document nearDocument = near.toDocument(); + String collection = StringUtils.hasText(collectionName) ? collectionName : getCollectionName(domainType); + String distanceField = operations.nearQueryDistanceFieldName(domainType); - Document command = new Document("geoNear", collection); - command.putAll(nearDocument); + Builder optionsBuilder = AggregationOptions.builder().collation(near.getCollation()); - if (nearDocument.containsKey("query")) { - Document query = (Document) nearDocument.get("query"); - command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(domainType))); + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); } - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command), - domainType, collectionName); + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); } - Document commandResult = executeCommand(command, this.readPreference); - List results = (List) commandResult.get("results"); - results = results == null ? 
Collections.emptyList() : results; + Aggregation $geoNear = TypedAggregation.newAggregation(domainType, Aggregation.geoNear(near, distanceField)) + .withOptions(optionsBuilder.build()); - DocumentCallback> callback = new GeoNearResultDocumentCallback<>( - new ProjectingReadCallback<>(mongoConverter, domainType, returnType, collectionName), near.getMetric()); - List> result = new ArrayList<>(results.size()); + AggregationResults results = aggregate($geoNear, collection, Document.class); + EntityProjection projection = operations.introspectProjection(returnType, domainType); - int index = 0; - long elementsToSkip = near.getSkip() != null ? near.getSkip() : 0; + DocumentCallback> callback = new GeoNearResultDocumentCallback<>(distanceField, + getResultReader(projection, collectionName, resultConverter), near.getMetric()); - for (Object element : results) { + List> result = new ArrayList<>(results.getMappedResults().size()); - /* - * As MongoDB currently (2.4.4) doesn't support the skipping of elements in near queries - * we skip the elements ourselves to avoid at least the document 2 object mapping overhead. - * - * @see MongoDB Jira: SERVER-3925 - */ - if (index >= elementsToSkip) { - result.add(callback.doWith((Document) element)); - } - index++; - } + BigDecimal aggregate = BigDecimal.ZERO; + for (Document element : results) { - if (elementsToSkip > 0) { - // as we skipped some elements we have to calculate the averageDistance ourselves: - return new GeoResults<>(result, near.getMetric()); + GeoResult geoResult = callback.doWith(element); + aggregate = aggregate.add(BigDecimal.valueOf(geoResult.getDistance().getValue())); + result.add(geoResult); } - GeoCommandStatistics stats = GeoCommandStatistics.from(commandResult); - return new GeoResults<>(result, new Distance(stats.getAverageDistance(), near.getMetric())); + Distance avgDistance = Distance.of( + result.size() == 0 ? 
0 : aggregate.divide(new BigDecimal(result.size()), RoundingMode.HALF_UP).doubleValue(), + near.getMetric()); + + return new GeoResults<>(result, avgDistance); } - @Nullable - @Override - public T findAndModify(Query query, Update update, Class entityClass) { - return findAndModify(query, update, new FindAndModifyOptions(), entityClass, - operations.determineCollectionName(entityClass)); + public @Nullable T findAndModify(Query query, UpdateDefinition update, Class entityClass) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass)); } - @Nullable @Override - public T findAndModify(Query query, Update update, Class entityClass, String collectionName) { + public @Nullable T findAndModify(Query query, UpdateDefinition update, Class entityClass, + String collectionName) { return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); } - @Nullable @Override - public T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass) { - return findAndModify(query, update, options, entityClass, operations.determineCollectionName(entityClass)); + public @Nullable T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass) { + return findAndModify(query, update, options, entityClass, getCollectionName(entityClass)); } - @Nullable @Override - public T findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, - String collectionName) { + public @Nullable T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass, String collectionName) { + return findAndModify(query, update, options, entityClass, collectionName, QueryResultConverter.entity()); + } - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); - Assert.notNull(options, "Options must not be null!"); - Assert.notNull(entityClass, 
"EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + @Nullable T findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass, String collectionName, QueryResultConverter resultConverter) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); + Assert.notNull(options, "Options must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and FindAndModifyOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and FindAndModifyOptions define a collation; Please provide the collation only via one of the two"); }); - query.getCollation().ifPresent(optionsToUse::collation); + if (!options.getCollation().isPresent()) { + operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); + } - return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); + return doFindAndModify(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), + getMappedSortObject(query, entityClass), entityClass, update, optionsToUse, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAndReplace(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions, java.lang.Class, java.lang.String, java.lang.Class) - */ @Override - public T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, - String 
collectionName, Class resultType) { + public @Nullable T findAndReplace(Query query, S replacement, FindAndReplaceOptions options, + Class entityType, String collectionName, Class resultType) { + return findAndReplace(query, replacement, options, entityType, collectionName, resultType, QueryResultConverter.entity()); + } + + @Nullable R findAndReplace(Query query, S replacement, FindAndReplaceOptions options, + Class entityType, String collectionName, Class resultType, QueryResultConverter resultConverter) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(replacement, "Replacement must not be null!"); - Assert.notNull(options, "Options must not be null! Use FindAndReplaceOptions#empty() instead."); - Assert.notNull(entityType, "EntityType must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(resultType, "ResultType must not be null! Use Object.class instead."); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null Use Object.class instead"); - Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none!"); - Assert.isTrue(query.getSkip() <= 0, "Query must not define skip."); + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + QueryContext queryContext = queryOperations.createQueryContext(query); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity); - Document mappedFields = 
queryMapper.getMappedFields(query.getFieldsObject(), entity); - Document mappedSort = queryMapper.getMappedSort(query.getSortObject(), entity); + EntityProjection projection = operations.introspectProjection(resultType, entityType); + CollectionPreparerDelegate collectionPreparer = createDelegate(query); + Document mappedQuery = queryContext.getMappedQuery(entity); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedSort = queryContext.getMappedSort(entity); + replacement = maybeCallBeforeConvert(replacement, collectionName); Document mappedReplacement = operations.forEntity(replacement).toMappedDocument(this.mongoConverter).getDocument(); - return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort, - query.getCollation().map(Collation::toMongoCollation).orElse(null), entityType, mappedReplacement, options, - resultType); + maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); + maybeCallBeforeSave(replacement, mappedReplacement, collectionName); + + R saved = doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, + queryContext.getCollation(entityType).orElse(null), entityType, mappedReplacement, options, projection, resultConverter); + + if (saved != null) { + maybeEmitEvent(new AfterSaveEvent<>(saved, mappedReplacement, collectionName)); + return maybeCallAfterSave(saved, mappedReplacement, collectionName); + } + + return saved; } // Find methods that take a Query to express the query and that return a single object that is also removed from the // collection in the database. 
- @Nullable @Override - public T findAndRemove(Query query, Class entityClass) { - return findAndRemove(query, entityClass, operations.determineCollectionName(entityClass)); + public @Nullable T findAndRemove(Query query, Class entityClass) { + return findAndRemove(query, entityClass, getCollectionName(entityClass)); } - @Nullable @Override - public T findAndRemove(Query query, Class entityClass, String collectionName) { + public @Nullable T findAndRemove(Query query, Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), query.getCollation().orElse(null), entityClass); + return doFindAndRemove(createDelegate(query), collectionName, query.getQueryObject(), query.getFieldsObject(), + getMappedSortObject(query, entityClass), operations.forType(entityClass).getCollation(query).orElse(null), + entityClass); } @Override public long count(Query query, Class entityClass) { - Assert.notNull(entityClass, "Entity class must not be null!"); - return count(query, entityClass, operations.determineCollectionName(entityClass)); + Assert.notNull(entityClass, "Entity class must not be null"); + return count(query, entityClass, getCollectionName(entityClass)); } @Override - public long count(final Query query, String collectionName) { + public long count(Query query, String collectionName) { return count(query, null, collectionName); } @@ -1106,66 +1213,125 @@ public long count(final Query query, String collectionName) { * (non-Javadoc) * @see 
org.springframework.data.mongodb.core.MongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) */ + @Override public long count(Query query, @Nullable Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + CountContext countContext = queryOperations.countQueryContext(query); - CountOptions options = new CountOptions(); - query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); - Document document = queryMapper.getMappedObject(query.getQueryObject(), - Optional.ofNullable(entityClass).map(it -> mappingContext.getPersistentEntity(entityClass))); + CollectionPreparerDelegate readPreference = createDelegate(query); + return doCount(readPreference, collectionName, mappedQuery, options); + } + + protected long doCount(CollectionPreparer collectionPreparer, String collectionName, Document filter, + CountOptions options) { + + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } - return execute(collectionName, collection -> collection.count(document, options)); + return countExecution.countDocuments(collectionPreparer, collectionName, filter, options); } /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object) + * @see org.springframework.data.mongodb.core.MongoOperations#estimatedCount(java.lang.String) */ + @Override + public long estimatedCount(String collectionName) { + return 
doEstimatedCount(CollectionPreparerDelegate.of(this), collectionName, new EstimatedDocumentCountOptions()); + } + + @SuppressWarnings("NullAway") + protected long doEstimatedCount(CollectionPreparer> collectionPreparer, + String collectionName, EstimatedDocumentCountOptions options) { + return execute(collectionName, + collection -> collectionPreparer.prepare(collection).estimatedDocumentCount(options)); + } + + @Override + public long exactCount(Query query, @Nullable Class entityClass, String collectionName) { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(createDelegate(query), collectionName, mappedQuery, options); + } + + @SuppressWarnings("NullAway") + protected long doExactCount(CollectionPreparer> collectionPreparer, String collectionName, + Document filter, CountOptions options) { + return execute(collectionName, collection -> collectionPreparer.prepare(collection) + .countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + } + + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + + return + // only empty filter for estimatedCount + filter.isEmpty() && + // no skip, no limit,... + isEmptyOptions(options) && + // transaction active? 
+ !MongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()); + } + + private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; + } + @Override public T insert(T objectToSave) { - Assert.notNull(objectToSave, "ObjectToSave must not be null!"); + Assert.notNull(objectToSave, "ObjectToSave must not be null"); - ensureNotIterable(objectToSave); - return insert(objectToSave, operations.determineEntityCollectionName(objectToSave)); + ensureNotCollectionLike(objectToSave); + return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#insert(java.lang.Object, java.lang.String) - */ @Override @SuppressWarnings("unchecked") public T insert(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "ObjectToSave must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(objectToSave, "ObjectToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); - ensureNotIterable(objectToSave); + ensureNotCollectionLike(objectToSave); return (T) doInsert(collectionName, objectToSave, this.mongoConverter); } - protected void ensureNotIterable(@Nullable Object o) { - if (null != o) { - if (o.getClass().isArray() || ITERABLE_CLASSES.contains(o.getClass().getName())) { - throw new IllegalArgumentException("Cannot use a collection here."); - } + /** + * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or + * {@link Iterator}. + * + * @param source can be {@literal null}. + * @since 3.2. + */ + protected void ensureNotCollectionLike(@Nullable Object source) { + + if (EntityOperations.isCollectionLike(source)) { + throw new IllegalArgumentException("Cannot use a collection here"); } } /** * Prepare the collection before any processing is done using it. 
This allows a convenient way to apply settings like - * slaveOk() etc. Can be overridden in sub-classes. + * withCodecRegistry() etc. Can be overridden in sub-classes. * * @param collection */ protected MongoCollection prepareCollection(MongoCollection collection) { - if (this.readPreference != null) { - collection = collection.withReadPreference(readPreference); + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { + return collection.withReadPreference(readPreference); } return collection; @@ -1192,7 +1358,7 @@ private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { if (wc == null || wc.getWObject() == null - || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + || (wc.getWObject() instanceof Number concern && concern.intValue() < 1)) { return WriteConcern.ACKNOWLEDGED; } } @@ -1201,41 +1367,40 @@ private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc protected T doInsert(String collectionName, T objectToSave, MongoWriter writer) { - AdaptibleEntity entity = operations.forEntity(objectToSave, mongoConverter.getConversionService()); - T toSave = entity.initializeVersionProperty(); - - BeforeConvertEvent event = new BeforeConvertEvent<>(toSave, collectionName); - toSave = maybeEmitEvent(event).getSource(); + BeforeConvertEvent event = new BeforeConvertEvent<>(objectToSave, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); + toConvert = maybeCallBeforeConvert(toConvert, collectionName); + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); entity.assertUpdateableIdIfNotSet(); + T initialized = entity.initializeVersionProperty(); Document dbDoc = entity.toMappedDocument(writer).getDocument(); - maybeEmitEvent(new BeforeSaveEvent<>(toSave, dbDoc, collectionName)); - Object id = 
insertDocument(collectionName, dbDoc, toSave.getClass()); + maybeEmitEvent(new BeforeSaveEvent<>(initialized, dbDoc, collectionName)); + initialized = maybeCallBeforeSave(initialized, dbDoc, collectionName); + Object id = insertDocument(collectionName, dbDoc, initialized.getClass()); - T saved = populateIdIfNecessary(toSave, id); + T saved = populateIdIfNecessary(initialized, id); maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); - - return saved; + return maybeCallAfterSave(saved, dbDoc, collectionName); } @Override @SuppressWarnings("unchecked") public Collection insert(Collection batchToSave, Class entityClass) { - Assert.notNull(batchToSave, "BatchToSave must not be null!"); + Assert.notNull(batchToSave, "BatchToSave must not be null"); - return (Collection) doInsertBatch(operations.determineCollectionName(entityClass), batchToSave, - this.mongoConverter); + return (Collection) doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); } @Override @SuppressWarnings("unchecked") public Collection insert(Collection batchToSave, String collectionName) { - Assert.notNull(batchToSave, "BatchToSave must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); + Assert.notNull(batchToSave, "BatchToSave must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); return (Collection) doInsertBatch(collectionName, batchToSave, this.mongoConverter); } @@ -1244,7 +1409,7 @@ public Collection insert(Collection batchToSave, String coll @SuppressWarnings("unchecked") public Collection insertAll(Collection objectsToSave) { - Assert.notNull(objectsToSave, "ObjectsToSave must not be null!"); + Assert.notNull(objectsToSave, "ObjectsToSave must not be null"); return (Collection) doInsertAll(objectsToSave, this.mongoConverter); } @@ -1260,15 +1425,8 @@ protected Collection doInsertAll(Collection listToSave, Mong continue; } - MongoPersistentEntity entity = 
mappingContext.getRequiredPersistentEntity(element.getClass()); - - String collection = entity.getCollection(); - List collectionElements = elementsByCollection.get(collection); - - if (null == collectionElements) { - collectionElements = new ArrayList<>(); - elementsByCollection.put(collection, collectionElements); - } + String collection = getCollectionName(ClassUtils.getUserClass(element)); + List collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>()); collectionElements.add(element); } @@ -1283,23 +1441,29 @@ protected Collection doInsertAll(Collection listToSave, Mong protected Collection doInsertBatch(String collectionName, Collection batchToSave, MongoWriter writer) { - Assert.notNull(writer, "MongoWriter must not be null!"); + Assert.notNull(writer, "MongoWriter must not be null"); - List documentList = new ArrayList<>(); + List documentList = new ArrayList<>(batchToSave.size()); List initializedBatchToSave = new ArrayList<>(batchToSave.size()); for (T uninitialized : batchToSave) { - AdaptibleEntity entity = operations.forEntity(uninitialized, mongoConverter.getConversionService()); - T toSave = entity.initializeVersionProperty(); + BeforeConvertEvent event = new BeforeConvertEvent<>(uninitialized, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); + toConvert = maybeCallBeforeConvert(toConvert, collectionName); - BeforeConvertEvent event = new BeforeConvertEvent<>(toSave, collectionName); - toSave = maybeEmitEvent(event).getSource(); + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); + T initialized = entity.initializeVersionProperty(); Document document = entity.toMappedDocument(writer).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(initialized, document, collectionName)); + initialized = maybeCallBeforeSave(initialized, document, collectionName); - maybeEmitEvent(new BeforeSaveEvent<>(toSave, document, 
collectionName)); - documentList.add(document); - initializedBatchToSave.add(toSave); + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(uninitialized.getClass()); + + documentList.add(mappedDocument.getDocument()); + initializedBatchToSave.add(initialized); } List ids = insertDocumentList(collectionName, documentList); @@ -1310,8 +1474,9 @@ protected Collection doInsertBatch(String collectionName, Collection(saved, documentList.get(i), collectionName)); - savedObjects.add(saved); + Document doc = documentList.get(i); + maybeEmitEvent(new AfterSaveEvent<>(saved, doc, collectionName)); + savedObjects.add(maybeCallAfterSave(saved, doc, collectionName)); } else { savedObjects.add(obj); } @@ -1324,65 +1489,70 @@ protected Collection doInsertBatch(String collectionName, Collection T save(T objectToSave) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - return save(objectToSave, operations.determineEntityCollectionName(objectToSave)); + Assert.notNull(objectToSave, "Object to save must not be null"); + return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } @Override @SuppressWarnings("unchecked") public T save(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + ensureNotCollectionLike(objectToSave); AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); return source.isVersionedEntity() // ? 
doSaveVersioned(source, collectionName) // : (T) doSave(collectionName, objectToSave, this.mongoConverter); - } @SuppressWarnings("unchecked") private T doSaveVersioned(AdaptibleEntity source, String collectionName) { - Number number = source.getVersion(); + if (source.isNew()) { + return (T) doInsert(collectionName, source.getBean(), this.mongoConverter); + } - if (number != null) { + // Create query for entity with the id and old version + Query query = source.getQueryForVersion(); - // Create query for entity with the id and old version - Query query = source.getQueryForVersion(); + // Bump version number + T toSave = source.incrementVersion(); - // Bump version number - T toSave = source.incrementVersion(); + toSave = maybeEmitEvent(new BeforeConvertEvent(toSave, collectionName)).getSource(); + toSave = maybeCallBeforeConvert(toSave, collectionName); - toSave = maybeEmitEvent(new BeforeConvertEvent(toSave, collectionName)).getSource(); + if (source.getBean() != toSave) { + source = operations.forEntity(toSave, mongoConverter.getConversionService()); + } - source.assertUpdateableIdIfNotSet(); + source.assertUpdateableIdIfNotSet(); - MappedDocument mapped = source.toMappedDocument(mongoConverter); + MappedDocument mapped = source.toMappedDocument(mongoConverter); - maybeEmitEvent(new BeforeSaveEvent<>(toSave, mapped.getDocument(), collectionName)); - Update update = mapped.updateWithoutId(); + maybeEmitEvent(new BeforeSaveEvent<>(toSave, mapped.getDocument(), collectionName)); + toSave = maybeCallBeforeSave(toSave, mapped.getDocument(), collectionName); + UpdateDefinition update = mapped.updateWithoutId(); - UpdateResult result = doUpdate(collectionName, query, update, toSave.getClass(), false, false); + UpdateResult result = doUpdate(collectionName, query, update, toSave.getClass(), false, false); - if (result.getModifiedCount() == 0) { - throw new OptimisticLockingFailureException( - String.format("Cannot save entity %s with version %s to collection %s. 
Has it been modified meanwhile?", - source.getId(), number, collectionName)); - } - maybeEmitEvent(new AfterSaveEvent<>(toSave, mapped.getDocument(), collectionName)); + if (result.getModifiedCount() == 0) { - return toSave; + throw new OptimisticLockingFailureException( + String.format("Cannot save entity %s with version %s to collection %s; Has it been modified meanwhile", + source.getId(), source.getVersion(), collectionName)); } + maybeEmitEvent(new AfterSaveEvent<>(toSave, mapped.getDocument(), collectionName)); - return (T) doInsert(collectionName, source.getBean(), this.mongoConverter); + return maybeCallAfterSave(toSave, mapped.getDocument(), collectionName); } protected T doSave(String collectionName, T objectToSave, MongoWriter writer) { objectToSave = maybeEmitEvent(new BeforeConvertEvent<>(objectToSave, collectionName)).getSource(); + objectToSave = maybeCallBeforeConvert(objectToSave, collectionName); AdaptibleEntity entity = operations.forEntity(objectToSave, mongoConverter.getConversionService()); entity.assertUpdateableIdIfNotSet(); @@ -1391,45 +1561,49 @@ protected T doSave(String collectionName, T objectToSave, MongoWriter wri Document dbDoc = mapped.getDocument(); maybeEmitEvent(new BeforeSaveEvent<>(objectToSave, dbDoc, collectionName)); + objectToSave = maybeCallBeforeSave(objectToSave, dbDoc, collectionName); Object id = saveDocument(collectionName, dbDoc, objectToSave.getClass()); - T saved = entity.populateIdIfNecessary(id); + T saved = populateIdIfNecessary(objectToSave, id); maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); - return saved; + return maybeCallAfterSave(saved, dbDoc, collectionName); } - protected Object insertDocument(final String collectionName, final Document document, final Class entityClass) { + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + protected Object insertDocument(String collectionName, Document document, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting 
Document containing fields: {} in collection: {}", document.keySet(), collectionName); + LOGGER.debug(String.format("Inserting Document containing fields: %s in collection: %s", document.keySet(), + collectionName)); } - return execute(collectionName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, - entityClass, document, null); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + MappedDocument mappedDocument = queryOperations.createInsertContext(MappedDocument.of(document)) + .prepareId(entityClass); - if (writeConcernToUse == null) { - collection.insertOne(document); - } else { - collection.withWriteConcern(writeConcernToUse).insertOne(document); - } + return execute(collectionName, collection -> { + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, + mappedDocument.getDocument(), null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - return operations.forEntity(document).getId(); + if (writeConcernToUse == null) { + collection.insertOne(mappedDocument.getDocument()); + } else { + collection.withWriteConcern(writeConcernToUse).insertOne(mappedDocument.getDocument()); } + + return operations.forEntity(mappedDocument.getDocument()).getId(); }); } - protected List insertDocumentList(final String collectionName, final List documents) { + protected List insertDocumentList(String collectionName, List documents) { if (documents.isEmpty()) { return Collections.emptyList(); } if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting list of Documents containing {} items", documents.size()); + LOGGER.debug(String.format("Inserting list of Documents containing %s items", documents.size())); } execute(collectionName, collection -> { @@ -1450,179 +1624,194 @@ protected List 
insertDocumentList(final String collectionName, final Lis return MappedDocument.toIds(documents); } - protected Object saveDocument(final String collectionName, final Document dbDoc, final Class entityClass) { + @SuppressWarnings("NullAway") + protected Object saveDocument(String collectionName, Document dbDoc, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Saving Document containing fields: {}", dbDoc.keySet()); + LOGGER.debug(String.format("Saving Document containing fields: %s", dbDoc.keySet())); } - return execute(collectionName, new CollectionCallback() { - public Object doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, - dbDoc, null); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + return execute(collectionName, collection -> { + + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.SAVE, collectionName, entityClass, + dbDoc, null); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + MappedDocument mapped = MappedDocument.of(dbDoc); + + MongoCollection collectionToUse = writeConcernToUse == null // + ? 
collection // + : collection.withWriteConcern(writeConcernToUse); - MappedDocument mapped = MappedDocument.of(dbDoc); + if (!mapped.hasId()) { - if (!mapped.hasId()) { - if (writeConcernToUse == null) { - collection.insertOne(dbDoc); + mapped = queryOperations.createInsertContext(mapped).prepareId(mappingContext.getPersistentEntity(entityClass)); + collectionToUse.insertOne(mapped.getDocument()); + } else { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); + Document replacement = updateContext.getMappedUpdate(entity); + Document filter = updateContext.getReplacementQuery(); + if (updateContext.requiresShardKey(filter, entity)) { + + if (entity.getShardKey().isImmutable()) { + filter = updateContext.applyShardKey(entity, filter, null); } else { - collection.withWriteConcern(writeConcernToUse).insertOne(dbDoc); + filter = updateContext.applyShardKey(entity, filter, + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()); } - } else if (writeConcernToUse == null) { - collection.replaceOne(mapped.getIdFilter(), dbDoc, new ReplaceOptions().upsert(true)); - } else { - collection.withWriteConcern(writeConcernToUse).replaceOne(mapped.getIdFilter(), dbDoc, - new ReplaceOptions().upsert(true)); } - return mapped.getId(); + + collectionToUse.replaceOne(filter, replacement, new com.mongodb.client.model.ReplaceOptions().upsert(true)); } + return mapped.getId(); }); } @Override - public UpdateResult upsert(Query query, Update update, Class entityClass) { - return doUpdate(operations.determineCollectionName(entityClass), query, update, entityClass, true, false); + public UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false); } @Override - public UpdateResult upsert(Query query, Update update, String 
collectionName) { + public UpdateResult upsert(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, true, false); } @Override - public UpdateResult upsert(Query query, Update update, Class entityClass, String collectionName) { + public UpdateResult upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, true, false); } @Override - public UpdateResult updateFirst(Query query, Update update, Class entityClass) { - return doUpdate(operations.determineCollectionName(entityClass), query, update, entityClass, false, false); + public UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false); } @Override - public UpdateResult updateFirst(final Query query, final Update update, final String collectionName) { + public UpdateResult updateFirst(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, false); } @Override - public UpdateResult updateFirst(Query query, Update update, Class entityClass, String collectionName) { + public UpdateResult updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, false, false); } @Override - public UpdateResult updateMulti(Query query, Update update, Class entityClass) { - return doUpdate(operations.determineCollectionName(entityClass), query, update, entityClass, false, true); + public UpdateResult updateMulti(Query query, UpdateDefinition update, Class 
entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true); } @Override - public UpdateResult updateMulti(final Query query, final Update update, String collectionName) { + public UpdateResult updateMulti(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, true); } @Override - public UpdateResult updateMulti(final Query query, final Update update, Class entityClass, String collectionName) { + public UpdateResult updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doUpdate(collectionName, query, update, entityClass, false, true); } - protected UpdateResult doUpdate(final String collectionName, final Query query, final Update update, - @Nullable final Class entityClass, final boolean upsert, final boolean multi) { + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + protected UpdateResult doUpdate(String collectionName, Query query, UpdateDefinition update, + @Nullable Class entityClass, boolean upsert, boolean multi) { - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(update, "Update must not be null"); - return execute(collectionName, new CollectionCallback() { - public UpdateResult doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { + MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); - MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); + UpdateContext updateContext = multi ? 
queryOperations.updateContext(update, query, upsert) + : queryOperations.updateSingleContext(update, query, upsert); + updateContext.increaseVersionForUpdateIfNecessary(entity); - increaseVersionForUpdateIfNecessary(entity, update); + Document queryObj = updateContext.getMappedQuery(entity); + UpdateOptions opts = updateContext.getUpdateOptions(entityClass, query); - UpdateOptions opts = new UpdateOptions(); - opts.upsert(upsert); + if (updateContext.isAggregationUpdate()) { - Document queryObj = new Document(); + List pipeline = updateContext.getUpdatePipeline(entityClass); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + update.getUpdateObject(), queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - if (query != null) { + return execute(collectionName, collection -> { - queryObj.putAll(queryMapper.getMappedObject(query.getQueryObject(), entity)); - query.getCollation().map(Collation::toMongoCollation).ifPresent(opts::collation); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName)); } - Document updateObj = updateMapper.getMappedObject(update.getUpdateObject(), entity); + collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; - if (multi && update.isIsolated() && !queryObj.containsKey("$isolated")) { - queryObj.put("$isolated", 1); - } + return multi ? 
collection.updateMany(queryObj, pipeline, opts) : collection.updateOne(queryObj, pipeline, opts); + }); + } - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Calling update using query: {} and update: {} in collection: {}", - serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName); - } + Document updateObj = updateContext.getMappedUpdate(entity); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + updateObj, queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, - entityClass, updateObj, queryObj); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + return execute(collectionName, collection -> { - collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); + } - if (!UpdateMapper.isUpdateObject(updateObj)) { + collection = writeConcernToUse != null ? 
collection.withWriteConcern(writeConcernToUse) : collection; - ReplaceOptions replaceOptions = new ReplaceOptions(); - replaceOptions.collation(opts.getCollation()); - replaceOptions.upsert(opts.isUpsert()); + if (!UpdateMapper.isUpdateObject(updateObj)) { - return collection.replaceOne(queryObj, updateObj, replaceOptions); - } else { - if (multi) { - return collection.updateMany(queryObj, updateObj, opts); + Document filter = new Document(queryObj); + + if (updateContext.requiresShardKey(filter, entity)) { + + if (entity.getShardKey().isImmutable()) { + filter = updateContext.applyShardKey(entity, filter, null); } else { - return collection.updateOne(queryObj, updateObj, opts); + filter = updateContext.applyShardKey(entity, filter, + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()); } } - } - }); - } - - private void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity persistentEntity, Update update) { - if (persistentEntity != null && persistentEntity.hasVersionProperty()) { - String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName(); - if (!update.modifies(versionFieldName)) { - update.inc(versionFieldName, 1L); + com.mongodb.client.model.ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); + return collection.replaceOne(filter, updateObj, replaceOptions); + } else { + return multi ? 
collection.updateMany(queryObj, updateObj, opts) + : collection.updateOne(queryObj, updateObj, opts); } - } + }); } @Override public DeleteResult remove(Object object) { - Assert.notNull(object, "Object must not be null!"); - - Query query = operations.forEntity(object).getByIdQuery(); + Assert.notNull(object, "Object must not be null"); - return remove(query, object.getClass()); + return remove(object, getCollectionName(object.getClass())); } @Override public DeleteResult remove(Object object, String collectionName) { - Assert.notNull(object, "Object must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - Query query = operations.forEntity(object).getByIdQuery(); + Query query = operations.forEntity(object).getRemoveByQuery(); return doRemove(collectionName, query, object.getClass(), false); } @@ -1634,91 +1823,92 @@ public DeleteResult remove(Query query, String collectionName) { @Override public DeleteResult remove(Query query, Class entityClass) { - return remove(query, entityClass, operations.determineCollectionName(entityClass)); + return remove(query, entityClass, getCollectionName(entityClass)); } @Override public DeleteResult remove(Query query, Class entityClass, String collectionName) { - Assert.notNull(entityClass, "EntityClass must not be null!"); + Assert.notNull(entityClass, "EntityClass must not be null"); return doRemove(collectionName, query, entityClass, true); } - protected DeleteResult doRemove(final String collectionName, final Query query, - @Nullable final Class entityClass, boolean multi) { + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + protected DeleteResult doRemove(String collectionName, Query query, @Nullable Class entityClass, + boolean multi) { - Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, "Collection name 
must not be null or empty!"); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - final MongoPersistentEntity entity = getPersistentEntity(entityClass); - final Document queryObject = queryMapper.getMappedObject(query.getQueryObject(), entity); + MongoPersistentEntity entity = getPersistentEntity(entityClass); - return execute(collectionName, new CollectionCallback() { + DeleteContext deleteContext = multi ? queryOperations.deleteQueryContext(query) + : queryOperations.deleteSingleContext(query); + Document queryObject = deleteContext.getMappedQuery(entity); + DeleteOptions options = deleteContext.getDeleteOptions(entityClass); - public DeleteResult doInCollection(MongoCollection collection) - throws MongoException, DataAccessException { + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, + null, queryObject); - maybeEmitEvent(new BeforeDeleteEvent<>(queryObject, entityClass, collectionName)); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - Document removeQuery = queryObject; - - DeleteOptions options = new DeleteOptions(); - query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); - - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, - entityClass, null, queryObject); + return execute(collectionName, collection -> { - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + maybeEmitEvent(new BeforeDeleteEvent<>(queryObject, entityClass, collectionName)); - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Remove using query: {} in collection: {}.", - new Object[] { serializeToJsonSafely(removeQuery), collectionName }); - } + Document removeQuery = queryObject; - if (query.getLimit() > 0 || query.getSkip() > 0) { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Remove using query: %s in collection: 
%s.", serializeToJsonSafely(removeQuery), + collectionName)); + } - MongoCursor cursor = new QueryCursorPreparer(query, entityClass) - .prepare(collection.find(removeQuery).projection(MappedDocument.getIdOnlyProjection())) // - .iterator(); + if (query.getLimit() > 0 || query.getSkip() > 0) { - Set ids = new LinkedHashSet<>(); - while (cursor.hasNext()) { - ids.add(MappedDocument.of(cursor.next()).getId()); - } + MongoCursor cursor = new QueryCursorPreparer(query, entityClass) + .prepare(collection.find(removeQuery).projection(MappedDocument.getIdOnlyProjection())) // + .iterator(); - removeQuery = MappedDocument.getIdIn(ids); + Set ids = new LinkedHashSet<>(); + while (cursor.hasNext()) { + ids.add(MappedDocument.of(cursor.next()).getId()); } - MongoCollection collectionToUse = writeConcernToUse != null - ? collection.withWriteConcern(writeConcernToUse) - : collection; + removeQuery = MappedDocument.getIdIn(ids); + } - DeleteResult result = multi ? collectionToUse.deleteMany(removeQuery, options) - : collection.deleteOne(removeQuery, options); + MongoCollection collectionToUse = writeConcernToUse != null + ? collection.withWriteConcern(writeConcernToUse) + : collection; - maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName)); + DeleteResult result = multi ? 
collectionToUse.deleteMany(removeQuery, options) + : collectionToUse.deleteOne(removeQuery, options); - return result; - } + maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName)); + + return result; }); } @Override public List findAll(Class entityClass) { - return findAll(entityClass, operations.determineCollectionName(entityClass)); + return findAll(entityClass, getCollectionName(entityClass)); } @Override public List findAll(Class entityClass, String collectionName) { - return executeFindMultiInternal(new FindCallback(new Document(), new Document()), null, - new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), collectionName); + return executeFindMultiInternal( + new FindCallback(CollectionPreparer.identity(), new Document(), new Document(), + operations.forType(entityClass).getCollation().map(Collation::toMongoCollation).orElse(null)), + CursorPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } @Override public MapReduceResults mapReduce(String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, - new MapReduceOptions().outputTypeInline(), entityClass); + return mapReduce(new Query(), inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions(), + entityClass); } @Override @@ -1730,8 +1920,7 @@ public MapReduceResults mapReduce(String inputCollectionName, String mapF @Override public MapReduceResults mapReduce(Query query, String inputCollectionName, String mapFunction, String reduceFunction, Class entityClass) { - return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions().outputTypeInline(), - entityClass); + return mapReduce(query, inputCollectionName, mapFunction, reduceFunction, new MapReduceOptions(), entityClass); } @Override @@ -1753,19 +1942,23 @@ public MapReduceResults 
mapReduce(Query query, String inputCollectionName * @param resultType * @return * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated public List mapReduce(Query query, Class domainType, String inputCollectionName, String mapFunction, String reduceFunction, @Nullable MapReduceOptions mapReduceOptions, Class resultType) { - Assert.notNull(domainType, "Domain type must not be null!"); - Assert.notNull(inputCollectionName, "Input collection name must not be null!"); - Assert.notNull(resultType, "Result type must not be null!"); - Assert.notNull(mapFunction, "Map function must not be null!"); - Assert.notNull(reduceFunction, "Reduce function must not be null!"); + Assert.notNull(domainType, "Domain type must not be null"); + Assert.notNull(inputCollectionName, "Input collection name must not be null"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); String mapFunc = replaceWithResourceIfNecessary(mapFunction); String reduceFunc = replaceWithResourceIfNecessary(reduceFunction); - MongoCollection inputCollection = getAndPrepareCollection(doGetDatabase(), inputCollectionName); + CollectionPreparerDelegate readPreference = createDelegate(query); + MongoCollection inputCollection = readPreference + .prepare(getAndPrepareCollection(doGetDatabase(), inputCollectionName)); // MapReduceOp MapReduceIterable mapReduce = inputCollection.mapReduce(mapFunc, reduceFunc, Document.class); @@ -1773,10 +1966,14 @@ public List mapReduce(Query query, Class domainType, String inputColle if (query.getLimit() > 0 && mapReduceOptions != null && mapReduceOptions.getLimit() == null) { mapReduce = mapReduce.limit(query.getLimit()); } - if (query.getMeta().getMaxTimeMsec() != null) { + if (query.getMeta().hasMaxTime()) { mapReduce = mapReduce.maxTime(query.getMeta().getMaxTimeMsec(), 
TimeUnit.MILLISECONDS); } - mapReduce = mapReduce.sort(getMappedSortObject(query, domainType)); + + Document mappedSort = getMappedSortObject(query, domainType); + if (mappedSort != null && !mappedSort.isEmpty()) { + mapReduce = mapReduce.sort(mappedSort); + } mapReduce = mapReduce .filter(queryMapper.getMappedObject(query.getQueryObject(), mappingContext.getPersistentEntity(domainType))); @@ -1787,7 +1984,7 @@ public List mapReduce(Query query, Class domainType, String inputColle Optionals.ifAllPresent(collation, mapReduceOptions.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and MapReduceOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); }); if (mapReduceOptions.getCollation().isPresent()) { @@ -1797,197 +1994,156 @@ public List mapReduce(Query query, Class domainType, String inputColle if (!CollectionUtils.isEmpty(mapReduceOptions.getScopeVariables())) { mapReduce = mapReduce.scope(new Document(mapReduceOptions.getScopeVariables())); } + if (mapReduceOptions.getLimit() != null && mapReduceOptions.getLimit() > 0) { mapReduce = mapReduce.limit(mapReduceOptions.getLimit()); } + if (mapReduceOptions.getFinalizeFunction().filter(StringUtils::hasText).isPresent()) { mapReduce = mapReduce.finalizeFunction(mapReduceOptions.getFinalizeFunction().get()); } + if (mapReduceOptions.getJavaScriptMode() != null) { mapReduce = mapReduce.jsMode(mapReduceOptions.getJavaScriptMode()); } - if (mapReduceOptions.getOutputSharded().isPresent()) { - mapReduce = mapReduce.sharded(mapReduceOptions.getOutputSharded().get()); - } - } - - mapReduce = collation.map(Collation::toMongoCollation).map(mapReduce::collation).orElse(mapReduce); - - List mappedResults = new ArrayList<>(); - DocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName); - - for (Document document : 
mapReduce) { - mappedResults.add(callback.doWith(document)); - } - - return mappedResults; - } - - public GroupByResults group(String inputCollectionName, GroupBy groupBy, Class entityClass) { - return group(null, inputCollectionName, groupBy, entityClass); - } - - public GroupByResults group(@Nullable Criteria criteria, String inputCollectionName, GroupBy groupBy, - Class entityClass) { - Document document = groupBy.getGroupByObject(); - document.put("ns", inputCollectionName); + if (StringUtils.hasText(mapReduceOptions.getOutputCollection()) && !mapReduceOptions.usesInlineOutput()) { - if (criteria == null) { - document.put("cond", null); - } else { - document.put("cond", queryMapper.getMappedObject(criteria.getCriteriaObject(), Optional.empty())); - } - // If initial document was a JavaScript string, potentially loaded by Spring's Resource abstraction, load it and - // convert to Document + mapReduce = mapReduce.collectionName(mapReduceOptions.getOutputCollection()) + .action(mapReduceOptions.getMapReduceAction()); - if (document.containsKey("initial")) { - Object initialObj = document.get("initial"); - if (initialObj instanceof String) { - String initialAsString = replaceWithResourceIfNecessary((String) initialObj); - document.put("initial", Document.parse(initialAsString)); + if (mapReduceOptions.getOutputDatabase().isPresent()) { + mapReduce = mapReduce.databaseName(mapReduceOptions.getOutputDatabase().get()); + } } } - if (document.containsKey("$reduce")) { - document.put("$reduce", replaceWithResourceIfNecessary(ObjectUtils.nullSafeToString(document.get("$reduce")))); - } - if (document.containsKey("$keyf")) { - document.put("$keyf", replaceWithResourceIfNecessary(ObjectUtils.nullSafeToString(document.get("$keyf")))); - } - if (document.containsKey("finalize")) { - document.put("finalize", replaceWithResourceIfNecessary(ObjectUtils.nullSafeToString(document.get("finalize")))); - } - - Document commandObject = new Document("group", document); - - if 
(LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing Group with Document [{}]", serializeToJsonSafely(commandObject)); + if (!collation.isPresent()) { + collation = operations.forType(domainType).getCollation(); } - Document commandResult = executeCommand(commandObject, this.readPreference); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Group command result = [{}]", commandResult); - } + mapReduce = collation.map(Collation::toMongoCollation).map(mapReduce::collation).orElse(mapReduce); - @SuppressWarnings("unchecked") - Iterable resultSet = (Iterable) commandResult.get("retval"); List mappedResults = new ArrayList<>(); - DocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, entityClass, inputCollectionName); + DocumentCallback callback = new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName); - for (Document resultDocument : resultSet) { - mappedResults.add(callback.doWith(resultDocument)); + for (Document document : mapReduce) { + mappedResults.add(callback.doWith(document)); } - return new GroupByResults<>(mappedResults, commandResult); + return mappedResults; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override public AggregationResults aggregate(TypedAggregation aggregation, Class outputType) { - return aggregate(aggregation, operations.determineCollectionName(aggregation.getInputType()), outputType); + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override public AggregationResults aggregate(TypedAggregation aggregation, String inputCollectionName, Class 
outputType) { - - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregate(aggregation, inputCollectionName, outputType, context); + return aggregate(aggregation, inputCollectionName, outputType, (AggregationOperationContext) null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override public AggregationResults aggregate(Aggregation aggregation, Class inputType, Class outputType) { - return aggregate(aggregation, operations.determineCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(inputType), outputType, + queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override public AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType) { - return aggregate(aggregation, collectionName, outputType, null); + return doAggregate(aggregation, collectionName, outputType, QueryResultConverter.entity()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(TypedAggregation aggregation, String inputCollectionName, + public Stream aggregateStream(TypedAggregation aggregation, String 
inputCollectionName, Class outputType) { - - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregateStream(aggregation, inputCollectionName, outputType, context); + return aggregateStream(aggregation, inputCollectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(TypedAggregation aggregation, Class outputType) { - return aggregateStream(aggregation, operations.determineCollectionName(aggregation.getInputType()), outputType); + public Stream aggregateStream(TypedAggregation aggregation, Class outputType) { + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregateStream(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(Aggregation aggregation, Class inputType, Class outputType) { + public Stream aggregateStream(Aggregation aggregation, Class inputType, Class outputType) { - return aggregateStream(aggregation, operations.determineCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregateStream(aggregation, getCollectionName(inputType), outputType, + queryOperations.createAggregation(aggregation, inputType).getAggregationOperationContext()); } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.MongoOperations#aggregateStream(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override - public CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, Class outputType) { + public Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType) { return aggregateStream(aggregation, collectionName, outputType, null); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ @Override @SuppressWarnings("unchecked") public List findAllAndRemove(Query query, String collectionName) { return (List) findAllAndRemove(query, Object.class, collectionName); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public List findAllAndRemove(Query query, Class entityClass) { - return findAllAndRemove(query, entityClass, operations.determineCollectionName(entityClass)); + return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public List findAllAndRemove(Query query, Class entityClass, String collectionName) { return doFindAndDelete(collectionName, query, entityClass); } + @Override + public UpdateResult replace(Query query, T replacement, ReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected UpdateResult replace(Query query, Class entityType, T replacement, ReplaceOptions options, + String 
collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use ReplaceOptions#none() instead"); + Assert.notNull(entityType, "EntityType must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); + + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + replacement = maybeCallBeforeConvert(replacement, collectionName); + Document mappedReplacement = updateContext.getMappedUpdate(mappingContext.getPersistentEntity(entityType)); + maybeEmitEvent(new BeforeSaveEvent<>(replacement, mappedReplacement, collectionName)); + replacement = maybeCallBeforeSave(replacement, mappedReplacement, collectionName); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedReplacement, updateContext.getQueryObject()); + + UpdateResult result = doReplace(options, entityType, collectionName, updateContext, + createCollectionPreparer(query, action), mappedReplacement); + + if (result.wasAcknowledged()) { + + maybeEmitEvent(new AfterSaveEvent<>(replacement, mappedReplacement, collectionName)); + maybeCallAfterSave(replacement, mappedReplacement, collectionName); + } + + return result; + } + /** * Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)} * and {@link #remove(Query, Class, String)}, whereas the {@link Query} for {@link #remove(Query, Class, String)} is @@ -1999,14 +2155,39 @@ public List findAllAndRemove(Query query, Class entityClass, String co * @return */ protected List doFindAndDelete(String collectionName, Query 
query, Class entityClass) { + return doFindAndDelete(collectionName, query, entityClass, QueryResultConverter.entity()); + } + + @SuppressWarnings("NullAway") + List doFindAndDelete(String collectionName, Query query, Class entityClass, + QueryResultConverter resultConverter) { - List result = find(query, entityClass, collectionName); + List ids = new ArrayList<>(); + + QueryResultConverterCallback callback = new QueryResultConverterCallback<>(resultConverter, + new ProjectingReadCallback<>(getConverter(), EntityProjection.nonProjecting(entityClass), collectionName)) { + @Override + public T doWith(Document object) { + ids.add(object.get("_id")); + return super.doWith(object); + } + }; + + List result = doFind(collectionName, createDelegate(query), query.getQueryObject(), query.getFieldsObject(), entityClass, + new QueryCursorPreparer(query, entityClass), callback); if (!CollectionUtils.isEmpty(result)) { - Query byIdInQuery = operations.getByIdInQuery(result); + Criteria[] criterias = ids.stream() // + .map(it -> Criteria.where("_id").is(it)) // + .toArray(Criteria[]::new); + + Query removeQuery = new Query(criterias.length == 1 ? 
criterias[0] : new Criteria().orOperator(criterias)); + if (query.hasReadPreference()) { + removeQuery.withReadPreference(query.getReadPreference()); + } - remove(byIdInQuery, entityClass, collectionName); + remove(removeQuery, entityClass, collectionName); } return result; @@ -2015,19 +2196,38 @@ protected List doFindAndDelete(String collectionName, Query query, Class< protected AggregationResults aggregate(Aggregation aggregation, String collectionName, Class outputType, @Nullable AggregationOperationContext context) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.notNull(outputType, "Output type must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.notNull(outputType, "Output type must not be null"); + + return doAggregate(aggregation, collectionName, outputType, + queryOperations.createAggregation(aggregation, context)); + } + + private AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, + AggregationDefinition context) { + return doAggregate(aggregation, collectionName, outputType, context.getAggregationOperationContext()); + } + + AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, + QueryResultConverter resultConverter) { - AggregationOperationContext contextToUse = new AggregationUtil(queryMapper, mappingContext) - .prepareAggregationContext(aggregation, context); - return doAggregate(aggregation, collectionName, outputType, contextToUse); + return doAggregate(aggregation, collectionName, outputType, resultConverter, queryOperations + .createAggregation(aggregation, (AggregationOperationContext) null).getAggregationOperationContext()); } + @SuppressWarnings({ "ConstantConditions", "NullAway" }) protected AggregationResults 
doAggregate(Aggregation aggregation, String collectionName, Class outputType, AggregationOperationContext context) { + return doAggregate(aggregation, collectionName, outputType, QueryResultConverter.entity(), context); + } + + @SuppressWarnings({"ConstantConditions", "NullAway"}) + AggregationResults doAggregate(Aggregation aggregation, String collectionName, Class outputType, + QueryResultConverter resultConverter, AggregationOperationContext context) { - DocumentCallback callback = new UnwrapAndReadDocumentCallback<>(mongoConverter, outputType, collectionName); + DocumentCallback callback = new QueryResultConverterCallback<>(resultConverter, + new ReadDocumentCallback<>(mongoConverter, outputType, collectionName)); AggregationOptions options = aggregation.getOptions(); AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext); @@ -2037,7 +2237,7 @@ protected AggregationResults doAggregate(Aggregation aggregation, String Document command = aggregationUtil.createCommand(collectionName, aggregation, context); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {}", serializeToJsonSafely(command)); + LOGGER.debug(String.format("Executing aggregation: %s", serializeToJsonSafely(command))); } Document commandResult = executeCommand(command); @@ -2048,21 +2248,53 @@ protected AggregationResults doAggregate(Aggregation aggregation, String List pipeline = aggregationUtil.createPipeline(aggregation, context); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug( + String.format("Executing aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } return execute(collectionName, collection -> { List rawResult = new ArrayList<>(); + CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options); + Class domainType = aggregation instanceof TypedAggregation ? 
((TypedAggregation) aggregation).getInputType() + : null; + + Optional collation = Optionals.firstNonEmpty(options::getCollation, + () -> operations.forType(domainType) // + .getCollation()); - AggregateIterable aggregateIterable = collection.aggregate(pipeline, Document.class) // - .collation(options.getCollation().map(Collation::toMongoCollation).orElse(null)) // - .allowDiskUse(options.isAllowDiskUse()); + AggregateIterable aggregateIterable = delegate.prepare(collection).aggregate(pipeline, Document.class) // + .collation(collation.map(Collation::toMongoCollation).orElse(null)); + + if (options.isAllowDiskUseSet()) { + aggregateIterable = aggregateIterable.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { aggregateIterable = aggregateIterable.batchSize(options.getCursorBatchSize()); } + options.getComment().ifPresent(aggregateIterable::comment); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + aggregateIterable = hintFunction.apply(mongoDbFactory, aggregateIterable::hintString, aggregateIterable::hint); + } + + if (options.hasExecutionTimeLimit()) { + aggregateIterable = aggregateIterable.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + if (options.isSkipResults()) { + + // toCollection only allowed for $out and $merge if those are the last stages + if (aggregation.getPipeline().isOutOrMerge()) { + aggregateIterable.toCollection(); + } else { + aggregateIterable.first(); + } + return new AggregationResults<>(Collections.emptyList(), new Document()); + } + MongoIterable iterable = aggregateIterable.map(val -> { rawResult.add(val); @@ -2074,93 +2306,96 @@ protected AggregationResults doAggregate(Aggregation aggregation, String }); } - protected CloseableIterator aggregateStream(Aggregation aggregation, String collectionName, - Class outputType, @Nullable AggregationOperationContext context) { + protected 
Stream aggregateStream(Aggregation aggregation, String collectionName, Class outputType, + @Nullable AggregationOperationContext context) { + return doAggregateStream(aggregation, collectionName, outputType, QueryResultConverter.entity(), context); + } + + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + Stream doAggregateStream(Aggregation aggregation, String collectionName, Class outputType, + QueryResultConverter resultConverter, + @Nullable AggregationOperationContext context) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.notNull(outputType, "Output type must not be null!"); - Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming!"); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); + Assert.isTrue(!aggregation.getOptions().isExplain(), "Can't use explain option with streaming"); - AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext); - AggregationOperationContext rootContext = aggregationUtil.prepareAggregationContext(aggregation, context); + AggregationDefinition aggregationDefinition = queryOperations.createAggregation(aggregation, context); AggregationOptions options = aggregation.getOptions(); - List pipeline = aggregationUtil.createPipeline(aggregation, rootContext); + List pipeline = aggregationDefinition.getAggregationPipeline(); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug( + String.format("Streaming aggregation: %s in collection %s", serializeToJsonSafely(pipeline), collectionName)); } - ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, 
collectionName); + DocumentCallback readCallback = new QueryResultConverterCallback<>(resultConverter, + new ReadDocumentCallback<>(mongoConverter, outputType, collectionName)); + + return execute(collectionName, (CollectionCallback>) collection -> { - return execute(collectionName, (CollectionCallback>) collection -> { + CollectionPreparerDelegate delegate = CollectionPreparerDelegate.of(options); - AggregateIterable cursor = collection.aggregate(pipeline, Document.class) // - .allowDiskUse(options.isAllowDiskUse()) // - .useCursor(true); + AggregateIterable cursor = delegate.prepare(collection).aggregate(pipeline, Document.class); + + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { cursor = cursor.batchSize(options.getCursorBatchSize()); } - if (options.getCollation().isPresent()) { - cursor = cursor.collation(options.getCollation().map(Collation::toMongoCollation).get()); + options.getComment().ifPresent(cursor::comment); + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (options.getHintObject().isPresent()) { + cursor = hintFunction.apply(mongoDbFactory, cursor::hintString, cursor::hint); } - return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, readCallback); + if (options.hasExecutionTimeLimit()) { + cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + Class domainType = aggregation instanceof TypedAggregation typedAggregation + ? 
typedAggregation.getInputType() + : null; + + Optionals.firstNonEmpty(options::getCollation, // + () -> operations.forType(domainType).getCollation()) // + .map(Collation::toMongoCollation) // + .ifPresent(cursor::collation); + + return new CloseableIterableCursorAdapter<>(cursor, exceptionTranslator, readCallback).stream(); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableFindOperation#query(java.lang.Class) - */ @Override public ExecutableFind query(Class domainType) { return new ExecutableFindOperationSupport(this).query(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableUpdateOperation#update(java.lang.Class) - */ @Override public ExecutableUpdate update(Class domainType) { return new ExecutableUpdateOperationSupport(this).update(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableRemoveOperation#remove(java.lang.Class) - */ @Override public ExecutableRemove remove(Class domainType) { return new ExecutableRemoveOperationSupport(this).remove(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableAggregation aggregateAndReturn(Class domainType) { return new ExecutableAggregationOperationSupport(this).aggregateAndReturn(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ExecutableMapReduce mapReduce(Class domainType) { return new ExecutableMapReduceOperationSupport(this).mapReduce(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#insert(java.lang.Class) - */ @Override public ExecutableInsert insert(Class domainType) { return new ExecutableInsertOperationSupport(this).insert(domainType); @@ -2168,14 +2403,12 @@ public ExecutableInsert 
insert(Class domainType) { protected String replaceWithResourceIfNecessary(String function) { - String func = function; - if (this.resourceLoader != null && ResourceUtils.isUrl(function)) { - Resource functionResource = resourceLoader.getResource(func); + Resource functionResource = resourceLoader.getResource(function); if (!functionResource.exists()) { - throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found!", function)); + throw new InvalidDataAccessApiUsageException(String.format("Resource %s not found", function)); } Scanner scanner = null; @@ -2184,7 +2417,7 @@ protected String replaceWithResourceIfNecessary(String function) { scanner = new Scanner(functionResource.getInputStream()); return scanner.useDelimiter("\\A").next(); } catch (IOException e) { - throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s!", function), e); + throw new InvalidDataAccessApiUsageException(String.format("Cannot read map-reduce file %s", function), e); } finally { if (scanner != null) { scanner.close(); @@ -2192,22 +2425,18 @@ protected String replaceWithResourceIfNecessary(String function) { } } - return func; + return function; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ExecutableInsertOperation#getCollectionNames() - */ + @Override + @SuppressWarnings({ "ConstantConditions", "NullAway" }) public Set getCollectionNames() { - return execute(new DbCallback>() { - public Set doInDB(MongoDatabase db) throws MongoException, DataAccessException { - Set result = new LinkedHashSet<>(); - for (String name : db.listCollectionNames()) { - result.add(name); - } - return result; + return execute(db -> { + Set result = new LinkedHashSet<>(); + for (String name : db.listCollectionNames()) { + result.add(name); } + return result; }); } @@ -2224,12 +2453,56 @@ protected MongoDatabase prepareDatabase(MongoDatabase database) { } protected , T> E maybeEmitEvent(E event) { + 
eventDelegate.publishEvent(event); + return event; + } - if (null != eventPublisher) { - eventPublisher.publishEvent(event); + protected T maybeCallBeforeConvert(T object, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(BeforeConvertCallback.class, object, collection); } - return event; + return object; + } + + protected T maybeCallBeforeSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(BeforeSaveCallback.class, object, document, collection); + } + + return object; + } + + protected T maybeCallAfterSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(AfterSaveCallback.class, object, document, collection); + } + + return object; + } + + protected T maybeCallAfterConvert(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(AfterConvertCallback.class, object, document, collection); + } + + return object; + } + + /** + * Create the specified collection using the provided options + * + * @param collectionName + * @param collectionOptions + * @return the collection that was created + */ + @SuppressWarnings("ConstantConditions") + protected MongoCollection doCreateCollection(String collectionName, Document collectionOptions) { + return doCreateCollection(collectionName, getCreateCollectionOptions(collectionOptions)); } /** @@ -2238,55 +2511,73 @@ protected , T> E maybeEmitEvent(E event) { * @param collectionName * @param collectionOptions * @return the collection that was created + * @since 3.3.3 */ - protected MongoCollection doCreateCollection(final String collectionName, - final Document collectionOptions) { - return execute(new DbCallback>() { - public MongoCollection doInDB(MongoDatabase db) throws MongoException, DataAccessException { + @SuppressWarnings({ "ConstantConditions", "NullAway" }) + protected 
MongoCollection doCreateCollection(String collectionName, + CreateCollectionOptions collectionOptions) { - CreateCollectionOptions co = new CreateCollectionOptions(); + return execute(db -> { - if (collectionOptions.containsKey("capped")) { - co.capped((Boolean) collectionOptions.get("capped")); - } - if (collectionOptions.containsKey("size")) { - co.sizeInBytes(((Number) collectionOptions.get("size")).longValue()); - } - if (collectionOptions.containsKey("max")) { - co.maxDocuments(((Number) collectionOptions.get("max")).longValue()); - } + db.createCollection(collectionName, collectionOptions); + + MongoCollection coll = db.getCollection(collectionName, Document.class); + + // TODO: Emit a collection created event + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Created collection [%s]", + coll.getNamespace() != null ? coll.getNamespace().getCollectionName() : collectionName)); + } + return coll; + }); + } + + private CreateCollectionOptions getCreateCollectionOptions(Document document) { - if (collectionOptions.containsKey("collation")) { - co.collation(IndexConverters.fromDocument(collectionOptions.get("collation", Document.class))); - } + CreateCollectionOptions options = new CreateCollectionOptions(); - if (collectionOptions.containsKey("validator")) { + if (document.containsKey("capped")) { + options.capped((Boolean) document.get("capped")); + } + if (document.containsKey("size")) { + options.sizeInBytes(((Number) document.get("size")).longValue()); + } + if (document.containsKey("max")) { + options.maxDocuments(((Number) document.get("max")).longValue()); + } - com.mongodb.client.model.ValidationOptions options = new com.mongodb.client.model.ValidationOptions(); + if (document.containsKey("collation")) { + options.collation(IndexConverters.fromDocument(document.get("collation", Document.class))); + } - if (collectionOptions.containsKey("validationLevel")) { - 
options.validationLevel(ValidationLevel.fromString(collectionOptions.getString("validationLevel"))); - } - if (collectionOptions.containsKey("validationAction")) { - options.validationAction(ValidationAction.fromString(collectionOptions.getString("validationAction"))); - } + if (document.containsKey("validator")) { - options.validator(collectionOptions.get("validator", Document.class)); - co.validationOptions(options); - } + ValidationOptions validation = new ValidationOptions(); - db.createCollection(collectionName, co); + if (document.containsKey("validationLevel")) { + validation.validationLevel(ValidationLevel.fromString(document.getString("validationLevel"))); + } + if (document.containsKey("validationAction")) { + validation.validationAction(ValidationAction.fromString(document.getString("validationAction"))); + } - MongoCollection coll = db.getCollection(collectionName, Document.class); + validation.validator(document.get("validator", Document.class)); + options.validationOptions(validation); + } - // TODO: Emit a collection created event - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Created collection [{}]", - coll.getNamespace() != null ? coll.getNamespace().getCollectionName() : collectionName); - } - return coll; + if (document.containsKey("timeseries")) { + + Document timeSeries = document.get("timeseries", Document.class); + TimeSeriesOptions timeseries = new TimeSeriesOptions(timeSeries.getString("timeField")); + if (timeSeries.containsKey("metaField")) { + timeseries.metaField(timeSeries.getString("metaField")); } - }); + if (timeSeries.containsKey("granularity")) { + timeseries.granularity(TimeSeriesGranularity.valueOf(timeSeries.getString("granularity").toUpperCase())); + } + options.timeSeriesOptions(timeseries); + } + return options; } /** @@ -2294,23 +2585,50 @@ public MongoCollection doInDB(MongoDatabase db) throws MongoException, * The query document is specified as a standard {@link Document} and so is the fields specification. 
* * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @return the {@link List} of converted objects. + * @return the converted object or {@literal null} if none exists. + */ + @Nullable + protected T doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass) { + return doFindOne(collectionName, collectionPreparer, query, fields, CursorPreparer.NO_OP_PREPARER, entityClass); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param preparer the preparer used to modify the cursor on execution. + * @param entityClass the parameterized type of the returned list. + * @return the converted object or {@literal null} if none exists. 
+ * @since 2.2 */ - protected T doFindOne(String collectionName, Document query, Document fields, Class entityClass) { + @Nullable + @SuppressWarnings("ConstantConditions") + protected T doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + CursorPreparer preparer, Class entityClass) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedFields = queryMapper.getMappedObject(fields, entity); + + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findOne using query: {} fields: {} for class: {} in collection: {}", serializeToJsonSafely(query), - mappedFields, entityClass, collectionName); + LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); } - return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields), + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } @@ -2319,13 +2637,15 @@ protected T doFindOne(String collectionName, Document query, Document fields * query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. 
* @param query the query document that specifies the criteria used to find a record * @param fields the document that specifies the fields to be returned * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected List doFind(String collectionName, Document query, Document fields, Class entityClass) { - return doFind(collectionName, query, fields, entityClass, null, + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); } @@ -2335,34 +2655,42 @@ protected List doFind(String collectionName, Document query, Document fie * specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). + * @param preparer allows for customization of the {@link FindIterable} used when iterating over the result set, + * (apply limits, skips and so on). * @return the {@link List} of converted objects. 
*/ - protected List doFind(String collectionName, Document query, Document fields, Class entityClass, - CursorPreparer preparer) { - return doFind(collectionName, query, fields, entityClass, preparer, + protected List doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, CursorPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); } - protected List doFind(String collectionName, Document query, Document fields, Class entityClass, - @Nullable CursorPreparer preparer, DocumentCallback objectCallback) { + protected List doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable CursorPreparer preparer, DocumentCallback objectCallback) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedFields = queryMapper.getMappedFields(fields, entity); - Document mappedQuery = queryMapper.getMappedObject(query, entity); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName); + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer sqcp + ? 
getMappedSortObject(sqcp.getSortObject(), entity) + : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityClass, + collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback, - collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), + preparer != null ? preparer : CursorPreparer.NO_OP_PREPARER, objectCallback, collectionName); } /** @@ -2371,29 +2699,35 @@ protected List doFind(String collectionName, Document query, Document * * @since 2.0 */ - List doFind(String collectionName, Document query, Document fields, Class sourceClass, - Class targetClass, CursorPreparer preparer) { + List doFind(CollectionPreparer> collectionPreparer, String collectionName, + Document query, Document fields, Class sourceClass, Class targetClass, + QueryResultConverter resultConverter, CursorPreparer preparer) { - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(sourceClass); + MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); - Document mappedFields = getMappedFieldsObject(fields, entity, targetClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName); + + Document mappedSort = preparer instanceof SortingQueryCursorPreparer 
sqcp + ? getMappedSortObject(sqcp.getSortObject(), entity) + : null; + LOGGER.debug(String.format("find using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), sourceClass, + collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, - new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName); + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields, null), preparer, + callback, collectionName); } /** * Convert given {@link CollectionOptions} to a document and take the domain type information into account when * creating a mapped schema for validation.
- * This method calls {@link #convertToDocument(CollectionOptions)} for backwards compatibility and potentially - * overwrites the validator with the mapped validator document. In the long run - * {@link #convertToDocument(CollectionOptions)} will be removed so that this one becomes the only source of truth. * * @param collectionOptions can be {@literal null}. * @param targetType must not be {@literal null}. Use {@link Object} type instead. @@ -2402,43 +2736,41 @@ List doFind(String collectionName, Document query, Document fields, Cl */ protected Document convertToDocument(@Nullable CollectionOptions collectionOptions, Class targetType) { - Document doc = convertToDocument(collectionOptions); - - if (collectionOptions != null) { - - collectionOptions.getValidationOptions().ifPresent(it -> it.getValidator() // - .ifPresent(val -> doc.put("validator", getMappedValidator(val, targetType)))); + if (collectionOptions == null) { + return new Document(); } - return doc; - } - - /** - * @param collectionOptions can be {@literal null}. - * @return never {@literal null}. - * @deprecated since 2.1 in favor of {@link #convertToDocument(CollectionOptions, Class)}. 
- */ - @Deprecated - protected Document convertToDocument(@Nullable CollectionOptions collectionOptions) { + Document doc = new Document(); + collectionOptions.getCapped().ifPresent(val -> doc.put("capped", val)); + collectionOptions.getSize().ifPresent(val -> doc.put("size", val)); + collectionOptions.getMaxDocuments().ifPresent(val -> doc.put("max", val)); + collectionOptions.getCollation().ifPresent(val -> doc.append("collation", val.toDocument())); - Document document = new Document(); + collectionOptions.getValidationOptions().ifPresent(it -> { - if (collectionOptions != null) { + it.getValidationLevel().ifPresent(val -> doc.append("validationLevel", val.getValue())); + it.getValidationAction().ifPresent(val -> doc.append("validationAction", val.getValue())); + it.getValidator().ifPresent(val -> doc.append("validator", getMappedValidator(val, targetType))); + }); - collectionOptions.getCapped().ifPresent(val -> document.put("capped", val)); - collectionOptions.getSize().ifPresent(val -> document.put("size", val)); - collectionOptions.getMaxDocuments().ifPresent(val -> document.put("max", val)); - collectionOptions.getCollation().ifPresent(val -> document.append("collation", val.toDocument())); + collectionOptions.getTimeSeriesOptions().map(operations.forType(targetType)::mapTimeSeriesOptions).ifPresent(it -> { - collectionOptions.getValidationOptions().ifPresent(it -> { + Document timeseries = new Document("timeField", it.getTimeField()); + if (StringUtils.hasText(it.getMetaField())) { + timeseries.append("metaField", it.getMetaField()); + } + if (!Granularity.DEFAULT.equals(it.getGranularity())) { + timeseries.append("granularity", it.getGranularity().name().toLowerCase()); + } + doc.put("timeseries", timeseries); + }); - it.getValidationLevel().ifPresent(val -> document.append("validationLevel", val.getValue())); - it.getValidationAction().ifPresent(val -> document.append("validationAction", val.getValue())); - it.getValidator().ifPresent(val -> 
document.append("validator", getMappedValidator(val, Object.class))); - }); - } + collectionOptions.getChangeStreamOptions().map(it -> new Document("enabled", it.getPreAndPostImages())) + .ifPresent(it -> { + doc.put("changeStreamPreAndPostImages", it); + }); - return document; + return doc; } Document getMappedValidator(Validator validator, Class domainType) { @@ -2454,8 +2786,7 @@ Document getMappedValidator(Validator validator, Class domainType) { /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The first document that matches the query is returned and also removed from the collection in the database. - *

+ * The first document that matches the query is returned and also removed from the collection in the database.
* The query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from @@ -2463,27 +2794,28 @@ Document getMappedValidator(Validator validator, Class domainType) { * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected T doFindAndRemove(String collectionName, Document query, Document fields, Document sort, - @Nullable Collation collation, Class entityClass) { - - EntityReader readerToUse = this.mongoConverter; + @SuppressWarnings("ConstantConditions") + protected @Nullable T doFindAndRemove(CollectionPreparer collectionPreparer, String collectionName, + Document query, @Nullable Document fields, @Nullable Document sort, @Nullable Collation collation, + Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("findAndRemove using query: {} fields: {} sort: {} for class: {} in collection: {}", - serializeToJsonSafely(query), fields, sort, entityClass, collectionName); + LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); } MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - return executeFindOneInternal( - new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation), - new ReadDocumentCallback<>(readerToUse, entityClass, collectionName), collectionName); + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), + new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } - protected T doFindAndModify(String collectionName, Document query, Document fields, Document sort, - Class entityClass, Update update, @Nullable FindAndModifyOptions options) { 
- - EntityReader readerToUse = this.mongoConverter; + @SuppressWarnings("ConstantConditions") + @Nullable T doFindAndModify(CollectionPreparer> collectionPreparer, + String collectionName, + Document query, @Nullable Document fields, @Nullable Document sort, Class entityClass, UpdateDefinition update, + @Nullable FindAndModifyOptions options, QueryResultConverter resultConverter) { if (options == null) { options = new FindAndModifyOptions(); @@ -2491,20 +2823,26 @@ protected T doFindAndModify(String collectionName, Document query, Document MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - increaseVersionForUpdateIfNecessary(entity, update); + UpdateContext updateContext = queryOperations.updateSingleContext(update, query, false); + updateContext.increaseVersionForUpdateIfNecessary(entity); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity); + Document mappedQuery = updateContext.getMappedQuery(entity); + Object mappedUpdate = updateContext.isAggregationUpdate() ? 
updateContext.getUpdatePipeline(entityClass) + : updateContext.getMappedUpdate(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndModify using query: {} fields: {} sort: {} for class: {} and update: {} " + "in collection: {}", - serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), - collectionName); + LOGGER.debug(String.format( + "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s in collection: %s", + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), collectionName)); } - return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options), - new ReadDocumentCallback<>(readerToUse, entityClass, collectionName), collectionName); + DocumentCallback callback = getResultReader(EntityProjection.nonProjecting(entityClass), collectionName, resultConverter); + + return executeFindOneInternal( + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, + update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), + callback, collectionName); } /** @@ -2523,22 +2861,88 @@ protected T doFindAndModify(String collectionName, Document query, Document * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. 
*/ @Nullable - protected T doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields, - Document mappedSort, @Nullable com.mongodb.client.model.Collation collation, Class entityType, - Document replacement, FindAndReplaceOptions options, Class resultType) { + protected T doFindAndReplace(CollectionPreparer> collectionPreparer, + String collectionName, + Document mappedQuery, Document mappedFields, Document mappedSort, + com.mongodb.client.model.@Nullable Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, Class resultType) { + + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionPreparer, collectionName, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection, QueryResultConverter.entity()); + } + + CollectionPreparerDelegate createDelegate(Query query) { + return CollectionPreparerDelegate.of(query); + } + + CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createDelegate(query); + if (action == null) { + return collectionPreparer; + } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); + } + + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. 
+ * @param projection the projection descriptor. + * @return {@literal null} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 3.4 + */ + @Nullable + private R doFindAndReplace(CollectionPreparer> collectionPreparer, + String collectionName, + Document mappedQuery, Document mappedFields, Document mappedSort, + com.mongodb.client.model.@Nullable Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, EntityProjection projection, QueryResultConverter resultConverter) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndReplace using query: {} fields: {} sort: {} for class: {} and replacement: {} " + "in collection: {}", - serializeToJsonSafely(mappedQuery), serializeToJsonSafely(mappedFields), serializeToJsonSafely(mappedSort), - entityType, serializeToJsonSafely(replacement), collectionName); + LOGGER + .debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), serializeToJsonSafely(mappedFields), + serializeToJsonSafely(mappedSort), entityType, serializeToJsonSafely(replacement), collectionName)); } - maybeEmitEvent(new BeforeSaveEvent<>(replacement, replacement, collectionName)); + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, mappedSort, + replacement, collation, options),callback, + collectionName); + } + + @SuppressWarnings("NullAway") + private UpdateResult doReplace(ReplaceOptions options, Class entityType, String collectionName, + UpdateContext updateContext, CollectionPreparer> collectionPreparer, + Document replacement) { - return executeFindOneInternal( - new FindAndReplaceCallback(mappedQuery, mappedFields, 
mappedSort, replacement, collation, options), - new ProjectingReadCallback<>(mongoConverter, entityType, resultType, collectionName), collectionName); + MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(entityType); + + ReplaceCallback replaceCallback = new ReplaceCallback(collectionPreparer, + updateContext.getMappedQuery(persistentEntity), replacement, updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("replace one using query: %s for class: %s in collection: %s", + serializeToJsonSafely(updateContext.getMappedQuery(persistentEntity)), entityType, collectionName)); + } + + return execute(collectionName, replaceCallback); } /** @@ -2567,24 +2971,24 @@ private MongoCollection getAndPrepareCollection(MongoDatabase db, Stri * Internal method using callbacks to do queries against the datastore that requires reading a single object from a * collection of objects. It will take the following steps *

    - *
  1. Execute the given {@link ConnectionCallback} for a {@link Document}.
  2. + *
  3. Execute the given {@link CollectionCallback} for a {@link Document}.
  4. *
  5. Apply the given {@link DocumentCallback} to each of the {@link Document}s to obtain the result.
  6. *
      * * @param * @param collectionCallback the callback to retrieve the {@link Document} with - * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ @Nullable private T executeFindOneInternal(CollectionCallback collectionCallback, - DocumentCallback objectCallback, String collectionName) { + DocumentCallback documentCallback, String collectionName) { try { - T result = objectCallback - .doWith(collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName))); - return result; + + Document document = collectionCallback.doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)); + return document != null ? documentCallback.doWith(document) : null; } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } @@ -2594,51 +2998,38 @@ private T executeFindOneInternal(CollectionCallback collectionCall * Internal method using callback to do queries against the datastore that requires reading a collection of objects. * It will take the following steps *
        - *
      1. Execute the given {@link ConnectionCallback} for a {@link DBCursor}.
      2. - *
      3. Prepare that {@link DBCursor} with the given {@link CursorPreparer} (will be skipped if {@link CursorPreparer} - * is {@literal null}
      4. - *
      5. Iterate over the {@link DBCursor} and applies the given {@link DocumentCallback} to each of the + *
      6. Execute the given {@link CollectionCallback} for a {@link FindIterable}.
      7. + *
      8. Prepare that {@link FindIterable} with the given {@link CursorPreparer} (will be skipped if + * {@link CursorPreparer} is {@literal null}
      9. + *
      10. Iterate over the {@link FindIterable} and applies the given {@link DocumentCallback} to each of the * {@link Document}s collecting the actual result {@link List}.
      11. *
          * * @param - * @param collectionCallback the callback to retrieve the {@link DBCursor} with - * @param preparer the {@link CursorPreparer} to potentially modify the {@link DBCursor} before iterating over it - * @param objectCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type + * @param collectionCallback the callback to retrieve the {@link FindIterable} with + * @param preparer the {@link CursorPreparer} to potentially modify the {@link FindIterable} before iterating over it + * @param documentCallback the {@link DocumentCallback} to transform {@link Document}s into the actual domain type * @param collectionName the collection to be queried * @return */ private List executeFindMultiInternal(CollectionCallback> collectionCallback, - @Nullable CursorPreparer preparer, DocumentCallback objectCallback, String collectionName) { + CursorPreparer preparer, DocumentCallback documentCallback, String collectionName) { try { - MongoCursor cursor = null; - - try { - - FindIterable iterable = collectionCallback - .doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)); + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { - if (preparer != null) { - iterable = preparer.prepare(iterable); - } - - cursor = iterable.iterator(); - - List result = new ArrayList<>(); + int available = cursor.available(); + List result = available > 0 ? 
new ArrayList<>(available) : new ArrayList<>(); while (cursor.hasNext()) { Document object = cursor.next(); - result.add(objectCallback.doWith(object)); + result.add(documentCallback.doWith(object)); } return result; - } finally { - - if (cursor != null) { - cursor.close(); - } } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); @@ -2646,35 +3037,30 @@ private List executeFindMultiInternal(CollectionCallback> collectionCallback, - @Nullable CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) { - - try { - - MongoCursor cursor = null; - - try { - FindIterable iterable = collectionCallback - .doInCollection(getAndPrepareCollection(doGetDatabase(), collectionName)); - - if (preparer != null) { - iterable = preparer.prepare(iterable); - } + CursorPreparer preparer, DocumentCallbackHandler callbackHandler, String collectionName) { - cursor = iterable.iterator(); + try (MongoCursor cursor = preparer + .initiateFind(getAndPrepareCollection(doGetDatabase(), collectionName), collectionCallback::doInCollection) + .iterator()) { - while (cursor.hasNext()) { - callbackHandler.processDocument(cursor.next()); - } - } finally { - if (cursor != null) { - cursor.close(); - } + while (cursor.hasNext()) { + callbackHandler.processDocument(cursor.next()); } } catch (RuntimeException e) { throw potentiallyConvertRuntimeException(e, exceptionTranslator); } } + @SuppressWarnings("unchecked") + private DocumentCallback getResultReader(EntityProjection projection, String collectionName, + QueryResultConverter resultConverter) { + + DocumentCallback readCallback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); + + return resultConverter == QueryResultConverter.entity() ? 
(DocumentCallback) readCallback + : new QueryResultConverterCallback(resultConverter, readCallback); + } + public PersistenceExceptionTranslator getExceptionTranslator() { return exceptionTranslator; } @@ -2684,7 +3070,7 @@ private MongoPersistentEntity getPersistentEntity(@Nullable Class type) { return type != null ? mappingContext.getPersistentEntity(type) : null; } - private static MongoConverter getDefaultMongoConverter(MongoDbFactory factory) { + private static MongoConverter getDefaultMongoConverter(MongoDatabaseFactory factory) { DbRefResolver dbRefResolver = new DefaultDbRefResolver(factory); MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList()); @@ -2695,46 +3081,34 @@ private static MongoConverter getDefaultMongoConverter(MongoDbFactory factory) { MappingMongoConverter converter = new MappingMongoConverter(dbRefResolver, mappingContext); converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(factory); converter.afterPropertiesSet(); return converter; } - private Document getMappedSortObject(Query query, Class type) { + @Nullable + private Document getMappedSortObject(@Nullable Query query, Class type) { - if (query == null || ObjectUtils.isEmpty(query.getSortObject())) { + if (query == null) { return null; } - return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + return getMappedSortObject(query.getSortObject(), type); } - private Document getMappedFieldsObject(Document fields, MongoPersistentEntity entity, Class targetType) { - return queryMapper.getMappedFields(addFieldsForProjection(fields, entity.getType(), targetType), entity); + private @Nullable Document getMappedSortObject(@Nullable Document sortObject, Class type) { + return getMappedSortObject(sortObject, mappingContext.getPersistentEntity(type)); } - /** - * For cases where {@code fields} is {@literal null} or {@literal empty} add fields required for creating the - * projection (target) 
type if the {@code targetType} is a {@literal closed interface projection}. - * - * @param fields can be {@literal null}. - * @param domainType must not be {@literal null}. - * @param targetType must not be {@literal null}. - * @return {@link Document} with fields to be included. - */ - private Document addFieldsForProjection(Document fields, Class domainType, Class targetType) { - - if (!fields.isEmpty() || !targetType.isInterface() || ClassUtils.isAssignable(domainType, targetType)) { - return fields; - } - ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType); + private @Nullable Document getMappedSortObject(@Nullable Document sortObject, @Nullable MongoPersistentEntity entity) { - if (projectionInformation.isClosed()) { - projectionInformation.getInputProperties().forEach(it -> fields.append(it.getName(), 1)); + if (ObjectUtils.isEmpty(sortObject)) { + return null; } - return fields; + return queryMapper.getMappedSort(sortObject, entity); } /** @@ -2755,7 +3129,7 @@ static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, /** * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. + * {@link Document} and executes that against the {@link MongoCollection}. 
* * @author Oliver Gierke * @author Thomas Risberg @@ -2763,24 +3137,25 @@ static RuntimeException potentiallyConvertRuntimeException(RuntimeException ex, */ private static class FindOneCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Optional fields; + private final CursorPreparer cursorPreparer; - public FindOneCallback(Document query, Document fields) { + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, Document fields, + CursorPreparer preparer) { + + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = Optional.of(fields).filter(it -> !ObjectUtils.isEmpty(fields)); + this.cursorPreparer = preparer; } + @Override public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - FindIterable iterable = collection.find(query, Document.class); - - if (LOGGER.isDebugEnabled()) { - - LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), - serializeToJsonSafely(fields.orElseGet(Document::new)), - collection.getNamespace() != null ? collection.getNamespace().getFullName() : "n/a"); - } + FindIterable iterable = cursorPreparer.initiateFind(collection, + col -> collectionPreparer.prepare(col).find(query, Document.class)); if (fields.isPresent()) { iterable = iterable.projection(fields.get()); @@ -2792,7 +3167,7 @@ public Document doInCollection(MongoCollection collection) throws Mong /** * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. + * {@link Document} and executes that against the {@link MongoCollection}. 
* * @author Oliver Gierke * @author Thomas Risberg @@ -2800,58 +3175,83 @@ public Document doInCollection(MongoCollection collection) throws Mong */ private static class FindCallback implements CollectionCallback> { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; + private final com.mongodb.client.model.@Nullable Collation collation; - public FindCallback(Document query, Document fields) { + public FindCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, com.mongodb.client.model.@Nullable Collation collation) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(fields, "Fields must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(fields, "Fields must not be null"); + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; + this.collation = collation; } + @Override public FindIterable doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - return collection.find(query, Document.class).projection(fields); + FindIterable findIterable = collectionPreparer.prepare(collection).find(query, Document.class) + .projection(fields); + + if (collation != null) { + findIterable = findIterable.collation(collation); + } + return findIterable; } } /** - * Optimized {@link CollectionCallback} that takes an already mappend query and a nullable + * Optimized {@link CollectionCallback} that takes an already mapped query and a nullable * {@link com.mongodb.client.model.Collation} to execute a count query limited to one element. 
* * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - private static class ExistsCallback implements CollectionCallback { + private class ExistsCallback implements CollectionCallback { + private final CollectionPreparer collectionPreparer; private final Document mappedQuery; - private final com.mongodb.client.model.Collation collation; + private final com.mongodb.client.model.@Nullable Collation collation; + + ExistsCallback(CollectionPreparer collectionPreparer, Document mappedQuery, + com.mongodb.client.model.@Nullable Collation collation) { + + this.collectionPreparer = collectionPreparer; + this.mappedQuery = mappedQuery; + this.collation = collation; + } @Override public Boolean doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - return collection.count(mappedQuery, new CountOptions().limit(1).collation(collation)) > 0; + + return doCount(collectionPreparer, collection.getNamespace().getCollectionName(), mappedQuery, + new CountOptions().limit(1).collation(collation)) > 0; } } /** * Simple {@link CollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. + * {@link Document} and executes that against the {@link MongoCollection}. 
* * @author Thomas Risberg */ private static class FindAndRemoveCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; - private final Document fields; - private final Document sort; + private final @Nullable Document fields; + private final @Nullable Document sort; private final Optional collation; - public FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) { + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + @Nullable Document fields, @Nullable Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; @@ -2859,32 +3259,40 @@ public FindAndRemoveCallback(Document query, Document fields, Document sort, @Nu this.collation = Optional.ofNullable(collation); } + @Override public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { FindOneAndDeleteOptions opts = new FindOneAndDeleteOptions().sort(sort).projection(fields); collation.map(Collation::toMongoCollation).ifPresent(opts::collation); - return collection.findOneAndDelete(query, opts); + return collectionPreparer.prepare(collection).findOneAndDelete(query, opts); } } private static class FindAndModifyCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; - private final Document fields; - private final Document sort; - private final Document update; + private final @Nullable Document fields; + private final @Nullable Document sort; + private final Object update; + private final List arrayFilters; private final FindAndModifyOptions options; - public FindAndModifyCallback(Document query, Document fields, Document sort, Document update, + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + @Nullable Document fields, @Nullable Document sort, Object 
update, List arrayFilters, FindAndModifyOptions options) { + + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; this.update = update; + this.arrayFilters = arrayFilters; this.options = options; } + @Override public Document doInCollection(MongoCollection collection) throws MongoException, DataAccessException { FindOneAndUpdateOptions opts = new FindOneAndUpdateOptions(); @@ -2899,7 +3307,17 @@ public Document doInCollection(MongoCollection collection) throws Mong options.getCollation().map(Collation::toMongoCollation).ifPresent(opts::collation); - return collection.findOneAndUpdate(query, update, opts); + if (!arrayFilters.isEmpty()) { + opts.arrayFilters(arrayFilters); + } + + if (update instanceof Document document) { + return collectionPreparer.prepare(collection).findOneAndUpdate(query, document, opts); + } else if (update instanceof List) { + return collectionPreparer.prepare(collection).findOneAndUpdate(query, (List) update, opts); + } + + throw new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update)); } } @@ -2912,16 +3330,18 @@ public Document doInCollection(MongoCollection collection) throws Mong */ private static class FindAndReplaceCallback implements CollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; private final Document update; - private final @Nullable com.mongodb.client.model.Collation collation; + private final com.mongodb.client.model.@Nullable Collation collation; private final FindAndReplaceOptions options; - FindAndReplaceCallback(Document query, Document fields, Document sort, Document update, - com.mongodb.client.model.Collation collation, FindAndReplaceOptions options) { - + FindAndReplaceCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Document update, 
com.mongodb.client.model.@Nullable Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -2930,10 +3350,6 @@ private static class FindAndReplaceCallback implements CollectionCallback collection) throws MongoException, DataAccessException { @@ -2950,7 +3366,7 @@ public Document doInCollection(MongoCollection collection) throws Mong opts.returnDocument(ReturnDocument.AFTER); } - return collection.findOneAndReplace(query, update, opts); + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, opts); } } @@ -2961,10 +3377,9 @@ public Document doInCollection(MongoCollection collection) throws Mong * @author Thomas Darimont */ - interface DocumentCallback { + protected interface DocumentCallback { - @Nullable - T doWith(@Nullable Document object); + T doWith(Document object); } /** @@ -2973,179 +3388,164 @@ interface DocumentCallback { * * @author Oliver Gierke * @author Christoph Strobl + * @author Roman Puchkovskiy */ - @RequiredArgsConstructor private class ReadDocumentCallback implements DocumentCallback { - private final @NonNull EntityReader reader; - private final @NonNull Class type; + private final EntityReader reader; + private final Class type; private final String collectionName; - @Nullable - public T doWith(@Nullable Document object) { + ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - if (null != object) { - maybeEmitEvent(new AfterLoadEvent<>(object, type, collectionName)); - } + this.reader = reader; + this.type = type; + this.collectionName = collectionName; + } - T source = reader.read(type, object); + @Override + public T doWith(Document document) { + + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); + T entity = reader.read(type, document); - if (null != source) { - maybeEmitEvent(new AfterConvertEvent<>(object, source, collectionName)); + if (entity == null) { + 
throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return source; + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + entity = maybeCallAfterConvert(entity, document, collectionName); + + return entity; + } + } + + static class QueryResultConverterCallback implements DocumentCallback { + + private final QueryResultConverter converter; + private final DocumentCallback delegate; + + QueryResultConverterCallback(QueryResultConverter converter, DocumentCallback delegate) { + this.converter = converter; + this.delegate = delegate; + } + + @Override + public R doWith(Document object) { + + Lazy lazy = Lazy.of(() -> delegate.doWith(object)); + return converter.mapDocument(object, lazy::get); } } /** * {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the - * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@litera interface}. + * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}. * * @param * @param * @since 2.0 */ - @RequiredArgsConstructor private class ProjectingReadCallback implements DocumentCallback { - private final @NonNull EntityReader reader; - private final @NonNull Class entityType; - private final @NonNull Class targetType; - private final @NonNull String collectionName; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate.DocumentCallback#doWith(org.bson.Document) - */ - @SuppressWarnings("unchecked") - @Nullable - public T doWith(@Nullable Document object) { - - if (object == null) { - return null; - } - - Class typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) ? 
entityType - : targetType; - - if (null != object) { - maybeEmitEvent(new AfterLoadEvent<>(object, targetType, collectionName)); - } - - Object source = reader.read(typeToRead, object); - Object result = targetType.isInterface() ? projectionFactory.createProjection(targetType, source) : source; - - if (null != result) { - maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName)); - } - - return (T) result; - } - } + private final MongoConverter mongoConverter; + private final EntityProjection projection; + private final String collectionName; - class UnwrapAndReadDocumentCallback extends ReadDocumentCallback { + ProjectingReadCallback(MongoConverter mongoConverter, EntityProjection projection, String collectionName) { - public UnwrapAndReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - super(reader, type, collectionName); + this.mongoConverter = mongoConverter; + this.projection = projection; + this.collectionName = collectionName; } @Override - public T doWith(@Nullable Document object) { - - if (object == null) { - return null; - } - - Object idField = object.get(Fields.UNDERSCORE_ID); + @SuppressWarnings("unchecked") + public T doWith(Document document) { - if (!(idField instanceof Document)) { - return super.doWith(object); - } + maybeEmitEvent(new AfterLoadEvent<>(document, projection.getMappedType().getType(), collectionName)); - Document toMap = new Document(); - Document nested = (Document) idField; - toMap.putAll(nested); + Object entity = mongoConverter.project(projection, document); - for (String key : object.keySet()) { - if (!Fields.UNDERSCORE_ID.equals(key)) { - toMap.put(key, object.get(key)); - } + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", mongoConverter)); } - return super.doWith(toMap); + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return (T) maybeCallAfterConvert(entity, document, collectionName); } } - class 
QueryCursorPreparer implements CursorPreparer { + class QueryCursorPreparer implements SortingQueryCursorPreparer { - private final @Nullable Query query; + private final Query query; + private final Document sortObject; + private final int limit; + private final long skip; private final @Nullable Class type; - public QueryCursorPreparer(@Nullable Query query, @Nullable Class type) { + QueryCursorPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + QueryCursorPreparer(Query query, Document sortObject, int limit, long skip, @Nullable Class type) { this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; this.type = type; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.CursorPreparer#prepare(com.mongodb.DBCursor) - */ - public FindIterable prepare(FindIterable cursor) { + @Override + public FindIterable prepare(FindIterable iterable) { - if (query == null) { - return cursor; - } + FindIterable cursorToUse = iterable; + + operations.forType(type).getCollation(query) // + .map(Collation::toMongoCollation) // + .ifPresent(cursorToUse::collation); Meta meta = query.getMeta(); - if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !meta.hasValues() && !query.getCollation().isPresent()) { - return cursor; + HintFunction hintFunction = HintFunction.from(query.getHint()); + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues() + && query.getCollation().isEmpty()) { + return cursorToUse; } - FindIterable cursorToUse; - - cursorToUse = query.getCollation().map(Collation::toMongoCollation).map(cursor::collation).orElse(cursor); - try { - if (query.getSkip() > 0) { - cursorToUse = cursorToUse.skip((int) query.getSkip()); + if (skip > 0) { + cursorToUse = cursorToUse.skip((int) skip); } - if 
(query.getLimit() > 0) { - cursorToUse = cursorToUse.limit(query.getLimit()); + if (limit > 0) { + cursorToUse = cursorToUse.limit(limit); } - if (!ObjectUtils.isEmpty(query.getSortObject())) { - Document sort = type != null ? getMappedSortObject(query, type) : query.getSortObject(); + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? getMappedSortObject(sortObject, type) : sortObject; cursorToUse = cursorToUse.sort(sort); } - if (StringUtils.hasText(query.getHint())) { - cursorToUse = cursorToUse.hint(Document.parse(query.getHint())); + if (hintFunction.isPresent()) { + cursorToUse = hintFunction.apply(mongoDbFactory, cursorToUse::hintString, cursorToUse::hint); } if (meta.hasValues()) { - if (StringUtils.hasText(meta.getComment())) { - cursorToUse = cursorToUse.comment(meta.getComment()); - } - - if (meta.getSnapshot()) { - cursorToUse = cursorToUse.snapshot(meta.getSnapshot()); + if (meta.hasComment()) { + cursorToUse = cursorToUse.comment(meta.getRequiredComment()); } - if (meta.getMaxScan() != null) { - cursorToUse = cursorToUse.maxScan(meta.getMaxScan()); - } - - if (meta.getMaxTimeMsec() != null) { - cursorToUse = cursorToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS); + if (meta.hasMaxTime()) { + cursorToUse = cursorToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); } if (meta.getCursorBatchSize() != null) { cursorToUse = cursorToUse.batchSize(meta.getCursorBatchSize()); } + if (meta.getAllowDiskUse() != null) { + cursorToUse = cursorToUse.allowDiskUse(meta.getAllowDiskUse()); + } + for (Meta.CursorOption option : meta.getFlags()) { switch (option) { @@ -3156,6 +3556,8 @@ public FindIterable prepare(FindIterable cursor) { case PARTIAL: cursorToUse = cursorToUse.partial(true); break; + case SECONDARY_READS: + break; default: throw new IllegalArgumentException(String.format("%s is no supported flag.", option)); } @@ -3168,6 +3570,12 @@ public FindIterable prepare(FindIterable cursor) { return cursorToUse; } 
+ + @Nullable + @Override + public Document getSortObject() { + return sortObject; + } } /** @@ -3175,9 +3583,11 @@ public FindIterable prepare(FindIterable cursor) { * a delegate and creates a {@link GeoResult} from the result. * * @author Oliver Gierke + * @author Christoph Strobl */ static class GeoNearResultDocumentCallback implements DocumentCallback> { + private final String distanceField; private final DocumentCallback delegate; private final Metric metric; @@ -3185,35 +3595,47 @@ static class GeoNearResultDocumentCallback implements DocumentCallback delegate, Metric metric) { + GeoNearResultDocumentCallback(String distanceField, DocumentCallback delegate, Metric metric) { - Assert.notNull(delegate, "DocumentCallback must not be null!"); + Assert.notNull(delegate, "DocumentCallback must not be null"); + this.distanceField = distanceField; this.delegate = delegate; this.metric = metric; } - @Nullable - public GeoResult doWith(@Nullable Document object) { + @Override + public GeoResult doWith(Document object) { - double distance = ((Double) object.get("dis")).doubleValue(); - Document content = (Document) object.get("obj"); + double distance = Double.NaN; + if (object.containsKey(distanceField)) { + distance = NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class); + } - T doWith = delegate.doWith(content); + T doWith = delegate.doWith(object); - return new GeoResult<>(doWith, new Distance(distance, metric)); + return new GeoResult<>(doWith, Distance.of(distance, metric)); } } /** - * A {@link CloseableIterator} that is backed by a MongoDB {@link Cursor}. + * @return the {@link MongoDatabaseFactory} in use. + * @since 3.1.4 + */ + public MongoDatabaseFactory getMongoDatabaseFactory() { + return mongoDbFactory; + } + + /** + * A {@link CloseableIterator} that is backed by a MongoDB {@link MongoCollection}. 
* * @author Thomas Darimont * @since 1.7 */ - @AllArgsConstructor(access = AccessLevel.PACKAGE) static class CloseableIterableCursorAdapter implements CloseableIterator { private volatile @Nullable MongoCursor cursor; @@ -3221,20 +3643,24 @@ static class CloseableIterableCursorAdapter implements CloseableIterator { private DocumentCallback objectReadCallback; /** - * Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link Cursor}. - * - * @param cursor - * @param exceptionTranslator - * @param objectReadCallback + * Creates a new {@link CloseableIterableCursorAdapter} backed by the given {@link MongoCollection}. */ - public CloseableIterableCursorAdapter(MongoIterable cursor, - PersistenceExceptionTranslator exceptionTranslator, DocumentCallback objectReadCallback) { + CloseableIterableCursorAdapter(MongoIterable cursor, PersistenceExceptionTranslator exceptionTranslator, + DocumentCallback objectReadCallback) { this.cursor = cursor.iterator(); this.exceptionTranslator = exceptionTranslator; this.objectReadCallback = objectReadCallback; } + CloseableIterableCursorAdapter(MongoCursor cursor, PersistenceExceptionTranslator exceptionTranslator, + DocumentCallback objectReadCallback) { + + this.cursor = cursor; + this.exceptionTranslator = exceptionTranslator; + this.objectReadCallback = objectReadCallback; + } + @Override public boolean hasNext() { @@ -3261,8 +3687,7 @@ public T next() { try { Document item = cursor.next(); - T converted = objectReadCallback.doWith(item); - return converted; + return objectReadCallback.doWith(item); } catch (RuntimeException ex) { throw potentiallyConvertRuntimeException(ex, exceptionTranslator); } @@ -3282,20 +3707,13 @@ public void close() { throw potentiallyConvertRuntimeException(ex, exceptionTranslator); } finally { cursor = null; - exceptionTranslator = null; - objectReadCallback = null; } } } - public MongoDbFactory getMongoDbFactory() { - return mongoDbFactory; - } - /** * {@link MongoTemplate} 
extension bound to a specific {@link ClientSession} that is applied when interacting with the - * server through the driver API. - *

          + * server through the driver API.
          * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * @@ -3313,27 +3731,19 @@ static class SessionBoundMongoTemplate extends MongoTemplate { */ SessionBoundMongoTemplate(ClientSession session, MongoTemplate that) { - super(that.getMongoDbFactory().withSession(session), that); + super(that.getMongoDatabaseFactory().withSession(session), that); this.delegate = that; this.session = session; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate#getCollection(java.lang.String) - */ @Override - public MongoCollection getCollection(String collectionName) { + public MongoCollection getCollection(@Nullable String collectionName) { // native MongoDB objects that offer methods with ClientSession must not be proxied. return delegate.getCollection(collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate#getDb() - */ @Override public MongoDatabase getDb() { @@ -3341,25 +3751,37 @@ public MongoDatabase getDb() { return delegate.getDb(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoTemplate#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override - @SuppressWarnings("unchecked") - public long count(Query query, @Nullable Class entityClass, String collectionName) { + protected boolean countCanBeEstimated(Document filter, CountOptions options) { + return false; + } + } - if (!session.hasActiveTransaction()) { - return super.count(query, entityClass, collectionName); - } + @FunctionalInterface + interface CountExecution { + long countDocuments(CollectionPreparer collectionPreparer, String collection, Document filter, + CountOptions options); + } + + private static class ReplaceCallback implements CollectionCallback { - CountOptions options = new CountOptions(); - 
query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + private final CollectionPreparer> collectionPreparer; + private final Document query; + private final Document update; + private final com.mongodb.client.model.ReplaceOptions options; - Document document = delegate.queryMapper.getMappedObject(query.getQueryObject(), - Optional.ofNullable(entityClass).map(it -> delegate.mappingContext.getPersistentEntity(entityClass))); + ReplaceCallback(CollectionPreparer> collectionPreparer, Document query, Document update, + com.mongodb.client.model.ReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.update = update; + this.options = options; + } - return execute(collectionName, collection -> collection.countDocuments(document, options)); + @Override + public UpdateResult doInCollection(MongoCollection collection) + throws MongoException, DataAccessException { + return collectionPreparer.prepare(collection).replaceOne(query, update, options); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java new file mode 100644 index 0000000000..583b243aa8 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/PropertyOperations.java @@ -0,0 +1,80 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; + +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.PersistentPropertyTranslator; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Predicates; + +/** + * Common operations performed on properties of an entity like extracting fields information for projection creation. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.1 + */ +class PropertyOperations { + + private final MappingContext, MongoPersistentProperty> mappingContext; + + PropertyOperations(MappingContext, MongoPersistentProperty> mappingContext) { + this.mappingContext = mappingContext; + } + + /** + * For cases where {@code fields} is {@link Document#isEmpty() empty} include only fields that are required for + * creating the projection (target) type if the {@code EntityProjection} is a {@literal DTO projection} or a + * {@literal closed interface projection}. + * + * @param projection must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return {@link Document} with fields to be included. 
+ */ + Document computeMappedFieldsForProjection(EntityProjection projection, + Document fields) { + + if (!projection.isClosedProjection()) { + return fields; + } + + Document projectedFields = new Document(); + + if (projection.getMappedType().getType().isInterface()) { + projection.forEach(it -> { + projectedFields.put(it.getPropertyPath().getSegment(), 1); + }); + } else { + + // DTO projections use merged metadata between domain type and result type + PersistentPropertyTranslator translator = PersistentPropertyTranslator.create( + mappingContext.getRequiredPersistentEntity(projection.getDomainType()), + Predicates.negate(MongoPersistentProperty::hasExplicitFieldName)); + + MongoPersistentEntity persistentEntity = mappingContext + .getRequiredPersistentEntity(projection.getMappedType()); + for (MongoPersistentProperty property : persistentEntity) { + projectedFields.put(translator.translate(property).getFieldName(), 1); + } + } + + return projectedFields; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java new file mode 100644 index 0000000000..4ae618eaa1 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryOperations.java @@ -0,0 +1,1051 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.bson.BsonValue; +import org.bson.Document; +import org.bson.codecs.Codec; +import org.bson.conversions.Bson; +import org.bson.types.ObjectId; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mapping.PropertyPath; +import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.context.MappingContext; +import org.springframework.data.mongodb.CodecRegistryProvider; +import org.springframework.data.mongodb.MongoExpression; +import org.springframework.data.mongodb.core.MappedDocument.MappedUpdate; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.AggregationExpression; +import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.TypedAggregation; +import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; +import org.springframework.data.mongodb.core.mapping.FieldName; +import org.springframework.data.mongodb.core.mapping.MongoId; +import 
org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; +import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; +import org.springframework.data.mongodb.core.mapping.ShardKey; +import org.springframework.data.mongodb.core.query.BasicQuery; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Meta; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.data.projection.EntityProjection; +import org.springframework.data.util.Lazy; +import org.springframework.util.Assert; +import org.springframework.util.ClassUtils; + +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.ReplaceOptions; +import com.mongodb.client.model.UpdateOptions; + +/** + * {@link QueryOperations} centralizes common operations required before an operation is actually ready to be executed. + * This involves mapping {@link Query queries} into their respective MongoDB representation, computing execution options + * for {@literal count}, {@literal remove}, and other methods. 
+ * + * @author Christoph Strobl + * @author Mark Paluch + * @author Florian Lüdiger + * @since 3.0 + */ +class QueryOperations { + + private final QueryMapper queryMapper; + private final UpdateMapper updateMapper; + private final EntityOperations entityOperations; + private final PropertyOperations propertyOperations; + private final CodecRegistryProvider codecRegistryProvider; + private final MappingContext, MongoPersistentProperty> mappingContext; + private final AggregationUtil aggregationUtil; + private final Map, Document> mappedShardKey = new ConcurrentHashMap<>(1); + + /** + * Create a new instance of {@link QueryOperations}. + * + * @param queryMapper must not be {@literal null}. + * @param updateMapper must not be {@literal null}. + * @param entityOperations must not be {@literal null}. + * @param propertyOperations must not be {@literal null}. + * @param codecRegistryProvider must not be {@literal null}. + */ + QueryOperations(QueryMapper queryMapper, UpdateMapper updateMapper, EntityOperations entityOperations, + PropertyOperations propertyOperations, CodecRegistryProvider codecRegistryProvider) { + + this.queryMapper = queryMapper; + this.updateMapper = updateMapper; + this.entityOperations = entityOperations; + this.propertyOperations = propertyOperations; + this.codecRegistryProvider = codecRegistryProvider; + this.mappingContext = queryMapper.getMappingContext(); + this.aggregationUtil = new AggregationUtil(queryMapper, mappingContext); + } + + InsertContext createInsertContext(Document source) { + return createInsertContext(MappedDocument.of(source)); + } + + InsertContext createInsertContext(MappedDocument mappedDocument) { + return new InsertContext(mappedDocument); + } + + /** + * Create a new {@link QueryContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. 
+ */ + QueryContext createQueryContext(Query query) { + return new QueryContext(query); + } + + /** + * Create a new {@link DistinctQueryContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link DistinctQueryContext}. + */ + DistinctQueryContext distinctQueryContext(Query query, String fieldName) { + return new DistinctQueryContext(query, fieldName); + } + + /** + * Create a new {@link CountContext} instance. + * + * @param query must not be {@literal null}. + * @return new instance of {@link CountContext}. + */ + CountContext countQueryContext(Query query) { + return new CountContext(query); + } + + /** + * Create a new {@link UpdateContext} instance affecting multiple documents. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext updateContext(UpdateDefinition updateDefinition, Query query, boolean upsert) { + return new UpdateContext(updateDefinition, query, true, upsert); + } + + /** + * Create a new {@link UpdateContext} instance affecting a single document. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Query query, boolean upsert) { + return new UpdateContext(updateDefinition, query, false, upsert); + } + + /** + * Create a new {@link UpdateContext} instance affecting a single document. + * + * @param updateDefinition must not be {@literal null}. + * @param query must not be {@literal null}. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. 
+ */ + UpdateContext updateSingleContext(UpdateDefinition updateDefinition, Document query, boolean upsert) { + return new UpdateContext(updateDefinition, query, false, upsert); + } + + /** + * @param replacement the {@link MappedDocument mapped replacement} document. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext replaceSingleContext(MappedDocument replacement, boolean upsert) { + return new UpdateContext(replacement, upsert); + } + + /** + * @param replacement the {@link MappedDocument mapped replacement} document. + * @param upsert use {@literal true} to insert diff when no existing document found. + * @return new instance of {@link UpdateContext}. + */ + UpdateContext replaceSingleContext(Query query, MappedDocument replacement, boolean upsert) { + return new UpdateContext(query, replacement, upsert); + } + + /** + * Create a new {@link DeleteContext} instance removing all matching documents. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. + */ + DeleteContext deleteQueryContext(Query query) { + return new DeleteContext(query, true); + } + + /** + * Create a new {@link DeleteContext} instance only the first matching document. + * + * @param query must not be {@literal null}. + * @return new instance of {@link QueryContext}. + */ + DeleteContext deleteSingleContext(Query query) { + return new DeleteContext(query, false); + } + + /** + * Create a new {@link AggregationDefinition} for the given {@link Aggregation}. + * + * @param aggregation must not be {@literal null}. + * @param inputType fallback mapping type in case of untyped aggregation. Can be {@literal null}. + * @return new instance of {@link AggregationDefinition}. 
+ * @since 3.2 + */ + AggregationDefinition createAggregation(Aggregation aggregation, @Nullable Class inputType) { + return new AggregationDefinition(aggregation, inputType); + } + + /** + * Create a new {@link AggregationDefinition} for the given {@link Aggregation}. + * + * @param aggregation must not be {@literal null}. + * @param aggregationOperationContext the {@link AggregationOperationContext} to use. Can be {@literal null}. + * @return new instance of {@link AggregationDefinition}. + * @since 3.2 + */ + AggregationDefinition createAggregation(Aggregation aggregation, + @Nullable AggregationOperationContext aggregationOperationContext) { + return new AggregationDefinition(aggregation, aggregationOperationContext); + } + + /** + * {@link InsertContext} encapsulates common tasks required to interact with {@link Document} to be inserted. + * + * @since 3.4.3 + */ + class InsertContext { + + private final MappedDocument source; + + private InsertContext(MappedDocument source) { + this.source = source; + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. + * + * @param type must not be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. + * @see #prepareId(MongoPersistentEntity) + */ + MappedDocument prepareId(Class type) { + return prepareId(mappingContext.getPersistentEntity(type)); + } + + /** + * Prepare the {@literal _id} field. May generate a new {@literal id} value and convert it to the id properties + * {@link MongoPersistentProperty#getFieldType() target type}. + * + * @param entity can be {@literal null}. + * @param + * @return the {@link MappedDocument} containing the changes. 
+ */ + @SuppressWarnings("NullAway") + MappedDocument prepareId(@Nullable MongoPersistentEntity entity) { + + if (entity == null || source.hasId()) { + return source; + } + + MongoPersistentProperty idProperty = entity.getIdProperty(); + if (idProperty != null + && (idProperty.hasExplicitWriteTarget() || idProperty.isAnnotationPresent(MongoId.class))) { + if (!ClassUtils.isAssignable(ObjectId.class, idProperty.getFieldType())) { + source.updateId(queryMapper.convertId(new ObjectId(), idProperty.getFieldType())); + } + } + return source; + } + } + + /** + * {@link QueryContext} encapsulates common tasks required to convert a {@link Query} into its MongoDB document + * representation, mapping field names, as well as determining and applying {@link Collation collations}. + * + * @author Christoph Strobl + */ + class QueryContext { + + private final Query query; + + /** + * Create new a {@link QueryContext} instance from the given {@literal query} (can be either a {@link Query} or a + * plain {@link Document}. + * + * @param query can be {@literal null}. + */ + private QueryContext(@Nullable Query query) { + this.query = query != null ? query : new Query(); + } + + /** + * @return never {@literal null}. + */ + Query getQuery() { + return query; + } + + /** + * Extract the raw {@link Query#getQueryObject() unmapped document} from the {@link Query}. + * + * @return + */ + Document getQueryObject() { + return query.getQueryObject(); + } + + /** + * Get the already mapped MongoDB query representation. + * + * @param domainType can be {@literal null}. + * @param entityLookup the {@link Function lookup} used to provide the {@link MongoPersistentEntity} for the + * given{@literal domainType} + * @param + * @return never {@literal null}. + */ + Document getMappedQuery(@Nullable Class domainType, + Function, MongoPersistentEntity> entityLookup) { + return getMappedQuery(domainType == null ? 
null : entityLookup.apply(domainType)); + } + + /** + * Get the already mapped MongoDB query representation. + * + * @param entity the Entity to map field names to. Can be {@literal null}. + * @param + * @return never {@literal null}. + */ + Document getMappedQuery(@Nullable MongoPersistentEntity entity) { + return queryMapper.getMappedObject(getQueryObject(), entity); + } + + @SuppressWarnings("NullAway") + Document getMappedFields(@Nullable MongoPersistentEntity entity, EntityProjection projection) { + + Document fields = evaluateFields(entity); + + if (entity == null) { + return fields; + } + + Document mappedFields; + if (!fields.isEmpty()) { + mappedFields = queryMapper.getMappedFields(fields, entity); + } else { + mappedFields = propertyOperations.computeMappedFieldsForProjection(projection, fields); + mappedFields = queryMapper.addMetaAttributes(mappedFields, entity); + } + + if (entity.hasTextScoreProperty() && mappedFields.containsKey(entity.getTextScoreProperty().getFieldName()) + && !query.getQueryObject().containsKey("$text")) { + mappedFields.remove(entity.getTextScoreProperty().getFieldName()); + } + + if (mappedFields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + return mappedFields; + } + + private Document evaluateFields(@Nullable MongoPersistentEntity entity) { + + Document fields = query.getFieldsObject(); + + if (fields.isEmpty()) { + return BsonUtils.EMPTY_DOCUMENT; + } + + Document evaluated = new Document(); + + for (Entry entry : fields.entrySet()) { + + if (entry.getValue() instanceof MongoExpression mongoExpression) { + + AggregationOperationContext ctx = entity == null ? 
Aggregation.DEFAULT_CONTEXT + : new RelaxedTypeBasedAggregationOperationContext(entity.getType(), mappingContext, queryMapper); + + evaluated.put(entry.getKey(), AggregationExpression.from(mongoExpression).toDocument(ctx)); + } else { + evaluated.put(entry.getKey(), entry.getValue()); + } + } + + return evaluated; + } + + /** + * Get the already mapped {@link Query#getSortObject() sort} option. + * + * @param entity the Entity to map field names to. Can be {@literal null}. + * @return never {@literal null}. + */ + Document getMappedSort(@Nullable MongoPersistentEntity entity) { + return queryMapper.getMappedSort(query.getSortObject(), entity); + } + + /** + * Apply the {@link com.mongodb.client.model.Collation} if present extracted from the {@link Query} or fall back to + * the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation() + * collation}. + * + * @param domainType can be {@literal null}. + * @param consumer must not be {@literal null}. + */ + void applyCollation(@Nullable Class domainType, Consumer consumer) { + getCollation(domainType).ifPresent(consumer); + } + + /** + * Get the {@link com.mongodb.client.model.Collation} extracted from the {@link Query} if present or fall back to + * the {@literal domain types} default {@link org.springframework.data.mongodb.core.mapping.Document#collation() + * collation}. + * + * @param domainType can be {@literal null}. + * @return never {@literal null}. + */ + Optional getCollation(@Nullable Class domainType) { + + return entityOperations.forType(domainType).getCollation(query) // + .map(Collation::toMongoCollation); + } + + /** + * Get the {@link HintFunction} reading the actual hint form the {@link Query}. + * + * @return new instance of {@link HintFunction}. + * @since 4.2 + */ + HintFunction getHintFunction() { + return HintFunction.from(query.getHint()); + } + + /** + * Read and apply the hint from the {@link Query}. 
+ * + * @since 4.2 + */ + void applyHint(Function stringConsumer, Function bsonConsumer) { + getHintFunction().ifPresent(codecRegistryProvider, stringConsumer, bsonConsumer); + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal distinct} queries. + * + * @author Christoph Strobl + */ + class DistinctQueryContext extends QueryContext { + + private final String fieldName; + + /** + * Create a new {@link DistinctQueryContext} instance. + * + * @param query can be {@literal null}. + * @param fieldName must not be {@literal null}. + */ + private DistinctQueryContext(@Nullable Object query, String fieldName) { + + super(query instanceof Document document ? new BasicQuery(document) : (Query) query); + this.fieldName = fieldName; + } + + @Override + Document getMappedFields(@Nullable MongoPersistentEntity entity, EntityProjection projection) { + return getMappedFields(entity); + } + + Document getMappedFields(@Nullable MongoPersistentEntity entity) { + return queryMapper.getMappedFields(new Document(fieldName, 1), entity); + } + + /** + * Get the mapped field name to project to. + * + * @param entity can be {@literal null}. + * @return never {@literal null}. + */ + String getMappedFieldName(@Nullable MongoPersistentEntity entity) { + return getMappedFields(entity).keySet().iterator().next(); + } + + /** + * Get the MongoDB native representation of the given {@literal type}. + * + * @param type must not be {@literal null}. + * @param + * @return never {@literal null}. + */ + @SuppressWarnings("unchecked") + Class getDriverCompatibleClass(Class type) { + + return codecRegistryProvider.getCodecFor(type) // + .map(Codec::getEncoderClass) // + .orElse((Class) BsonValue.class); + } + + /** + * Get the most specific read target type based on the user {@literal requestedTargetType} an the property type + * based on meta information extracted from the {@literal domainType}. 
+ * + * @param requestedTargetType must not be {@literal null}. + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + Class getMostSpecificConversionTargetType(Class requestedTargetType, Class domainType) { + + Class conversionTargetType = requestedTargetType; + try { + + Class propertyType = PropertyPath.from(fieldName, domainType).getLeafProperty().getLeafType(); + + // use the more specific type but favor UserType over property one + if (ClassUtils.isAssignable(requestedTargetType, propertyType)) { + conversionTargetType = propertyType; + } + } catch (PropertyReferenceException e) { + // just don't care about it as we default to Object.class anyway. + } + + return conversionTargetType; + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal count} queries. + * + * @author Christoph Strobl + */ + class CountContext extends QueryContext { + + /** + * Creates a new {@link CountContext} instance. + * + * @param query can be {@literal null}. + */ + CountContext(@Nullable Query query) { + super(query); + } + + /** + * Get the {@link CountOptions} applicable for the {@link Query}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + CountOptions getCountOptions(@Nullable Class domainType) { + return getCountOptions(domainType, null); + } + + /** + * Get the {@link CountOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param callback a callback to modify the generated options. Can be {@literal null}. 
+ * @return + */ + CountOptions getCountOptions(@Nullable Class domainType, @Nullable Consumer callback) { + + CountOptions options = new CountOptions(); + Query query = getQuery(); + + applyCollation(domainType, options::collation); + + if (query.getLimit() > 0) { + options.limit(query.getLimit()); + } + + if (query.getSkip() > 0) { + options.skip((int) query.getSkip()); + } + + Meta meta = query.getMeta(); + if (meta.hasValues()) { + + if (meta.hasMaxTime()) { + options.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); + } + + if (meta.hasComment()) { + options.comment(meta.getComment()); + } + } + + HintFunction hintFunction = HintFunction.from(query.getHint()); + + if (hintFunction.isPresent()) { + options = hintFunction.apply(codecRegistryProvider, options::hintString, options::hint); + } + + if (callback != null) { + callback.accept(options); + } + + return options; + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal delete} queries. + * + * @author Christoph Strobl + */ + class DeleteContext extends QueryContext { + + private final boolean multi; + + /** + * Crate a new {@link DeleteContext} instance. + * + * @param query can be {@literal null}. + * @param multi use {@literal true} to remove all matching documents, {@literal false} for just the first one. + */ + DeleteContext(@Nullable Query query, boolean multi) { + + super(query); + this.multi = multi; + } + + /** + * Get the {@link DeleteOptions} applicable for the {@link Query}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + DeleteOptions getDeleteOptions(@Nullable Class domainType) { + return getDeleteOptions(domainType, null); + } + + /** + * Get the {@link DeleteOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param callback a callback to modify the generated options. Can be {@literal null}. 
+ * @return + */ + DeleteOptions getDeleteOptions(@Nullable Class domainType, @Nullable Consumer callback) { + + DeleteOptions options = new DeleteOptions(); + applyCollation(domainType, options::collation); + + if (callback != null) { + callback.accept(options); + } + + return options; + } + + /** + * @return {@literal true} if all matching documents shall be deleted. + */ + boolean isMulti() { + return multi; + } + } + + /** + * A {@link QueryContext} that encapsulates common tasks required when running {@literal updates}. + */ + class UpdateContext extends QueryContext { + + private final boolean multi; + private final boolean upsert; + private final @Nullable UpdateDefinition update; + private final @Nullable MappedDocument mappedDocument; + + /** + * Create a new {@link UpdateContext} instance. + * + * @param update must not be {@literal null}. + * @param query must not be {@literal null}. + * @param multi use {@literal true} to update all matching documents. + * @param upsert use {@literal true} to insert a new document if none match. + */ + UpdateContext(UpdateDefinition update, Document query, boolean multi, boolean upsert) { + this(update, new BasicQuery(query), multi, upsert); + } + + /** + * Create a new {@link UpdateContext} instance. + * + * @param update must not be {@literal null}. + * @param query can be {@literal null}. + * @param multi use {@literal true} to update all matching documents. + * @param upsert use {@literal true} to insert a new document if none match. 
+ */ + UpdateContext(UpdateDefinition update, @Nullable Query query, boolean multi, boolean upsert) { + + super(query); + + this.multi = multi; + this.upsert = upsert; + this.update = update; + this.mappedDocument = null; + } + + UpdateContext(MappedDocument update, boolean upsert) { + this(new BasicQuery(BsonUtils.asDocument(update.getIdFilter())), update, upsert); + } + + UpdateContext(Query query, MappedDocument update, boolean upsert) { + + super(query); + this.multi = false; + this.upsert = upsert; + this.mappedDocument = update; + this.update = null; + } + + /** + * Get the {@link UpdateOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @return never {@literal null}. + */ + UpdateOptions getUpdateOptions(@Nullable Class domainType) { + return getUpdateOptions(domainType, null); + } + + /** + * Get the {@link UpdateOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param query can be {@literal null} + * @return never {@literal null}. + */ + UpdateOptions getUpdateOptions(@Nullable Class domainType, @Nullable Query query) { + UpdateOptions options = new UpdateOptions(); + options.upsert(upsert); + + if (update != null && update.hasArrayFilters()) { + options + .arrayFilters(update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList())); + } + + if (query != null && query.isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); + } + + HintFunction.from(getQuery().getHint()).ifPresent(codecRegistryProvider, options::hintString, options::hint); + applyCollation(domainType, options::collation); + + return options; + } + + /** + * Get the {@link ReplaceOptions} applicable for the {@link Query}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. 
+ */ + ReplaceOptions getReplaceOptions(@Nullable Class domainType) { + return getReplaceOptions(domainType, null); + } + + /** + * Get the {@link ReplaceOptions} applicable for the {@link Query}. + * + * @param domainType can be {@literal null}. + * @param callback a callback to modify the generated options. Can be {@literal null}. + * @return + */ + ReplaceOptions getReplaceOptions(@Nullable Class domainType, @Nullable Consumer callback) { + + UpdateOptions updateOptions = getUpdateOptions(domainType); + + ReplaceOptions options = new ReplaceOptions(); + options.collation(updateOptions.getCollation()); + options.upsert(updateOptions.isUpsert()); + applyHint(options::hintString, options::hint); + if (!isMulti() && getQuery().isSorted()) { + options.sort(getMappedSort(domainType != null ? mappingContext.getPersistentEntity(domainType) : null)); + } + + if (callback != null) { + callback.accept(options); + } + + return options; + } + + @Override + Document getMappedQuery(@Nullable MongoPersistentEntity domainType) { + return applyIsolation(super.getMappedQuery(domainType)); + } + + /** + * A replacement query that is derived from the already {@link MappedDocument}. + * + * @return + */ + Document getReplacementQuery() { + return applyIsolation(getQueryObject()); + } + + private Document applyIsolation(Document mappedQuery) { + if (multi && update != null && update.isIsolated() && !mappedQuery.containsKey("$isolated")) { + mappedQuery = new Document(mappedQuery); + mappedQuery.put("$isolated", 1); + } + return mappedQuery; + } + + Document applyShardKey(MongoPersistentEntity domainType, Document filter, @Nullable Document existing) { + + Document shardKeySource = existing != null ? existing + : mappedDocument != null ? 
mappedDocument.getDocument() : getMappedUpdate(domainType); + + Document filterWithShardKey = new Document(filter); + getMappedShardKeyFields(domainType) + .forEach(key -> filterWithShardKey.putIfAbsent(key, BsonUtils.resolveValue((Bson) shardKeySource, key))); + + return filterWithShardKey; + } + + boolean requiresShardKey(Document filter, @Nullable MongoPersistentEntity domainType) { + + return !multi && domainType != null && domainType.isSharded() && !shardedById(domainType) + && !filter.keySet().containsAll(getMappedShardKeyFields(domainType)); + } + + /** + * @return {@literal true} if the {@link MongoPersistentEntity#getShardKey() shard key} is the entities + * {@literal id} property. + * @since 3.0 + */ + private boolean shardedById(MongoPersistentEntity domainType) { + + ShardKey shardKey = domainType.getShardKey(); + if (shardKey.size() != 1) { + return false; + } + + String key = shardKey.getPropertyNames().iterator().next(); + if (FieldName.ID.name().equals(key)) { + return true; + } + + MongoPersistentProperty idProperty = domainType.getIdProperty(); + return idProperty != null && idProperty.getName().equals(key); + } + + Set getMappedShardKeyFields(MongoPersistentEntity entity) { + return getMappedShardKey(entity).keySet(); + } + + Document getMappedShardKey(MongoPersistentEntity entity) { + return mappedShardKey.computeIfAbsent(entity.getType(), + key -> queryMapper.getMappedFields(entity.getShardKey().getDocument(), entity)); + } + + /** + * Get the already mapped aggregation pipeline to use with an {@link #isAggregationUpdate()}. + * + * @param domainType must not be {@literal null}. + * @return never {@literal null}. + */ + List getUpdatePipeline(@Nullable Class domainType) { + + Assert.isInstanceOf(AggregationUpdate.class, update); + + Class type = domainType != null ? 
domainType : Object.class; + + AggregationOperationContext context = new RelaxedTypeBasedAggregationOperationContext(type, mappingContext, + queryMapper); + return aggregationUtil.createPipeline((AggregationUpdate) update, context); + } + + /** + * Get the already mapped update {@link Document}. + * + * @param entity + * @return + */ + @SuppressWarnings("NullAway") + Document getMappedUpdate(@Nullable MongoPersistentEntity entity) { + + if (update != null) { + return update instanceof MappedUpdate ? update.getUpdateObject() + : updateMapper.getMappedObject(update.getUpdateObject(), entity); + } + return mappedDocument.getDocument(); + } + + /** + * Increase a potential {@link MongoPersistentEntity#getVersionProperty() version property} prior to update if not + * already done in the actual {@link UpdateDefinition} + * + * @param persistentEntity can be {@literal null}. + */ + void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity persistentEntity) { + + if (persistentEntity != null && persistentEntity.hasVersionProperty()) { + + String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName(); + if (update != null && !update.modifies(versionFieldName)) { + update.inc(versionFieldName); + } + } + } + + /** + * @return {@literal true} if the update holds an aggregation pipeline. + */ + boolean isAggregationUpdate() { + return update instanceof AggregationUpdate; + } + + /** + * @return {@literal true} if all matching documents should be updated. + */ + boolean isMulti() { + return multi; + } + } + + /** + * A value object that encapsulates common tasks required when running {@literal aggregations}. 
+ * + * @since 3.2 + */ + class AggregationDefinition { + + private final Aggregation aggregation; + private final Lazy aggregationOperationContext; + private final Lazy> pipeline; + private final @Nullable Class inputType; + + /** + * Creates new instance of {@link AggregationDefinition} extracting the input type from either the + * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or + * the given {@literal aggregationOperationContext} if present.
          + * Creates a new {@link AggregationOperationContext} if none given, based on the {@link Aggregation} input type and + * the desired {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
          + * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse. + * + * @param aggregation the source aggregation. + * @param aggregationOperationContext can be {@literal null}. + */ + AggregationDefinition(Aggregation aggregation, @Nullable AggregationOperationContext aggregationOperationContext) { + + this.aggregation = aggregation; + + if (aggregation instanceof TypedAggregation typedAggregation) { + this.inputType = typedAggregation.getInputType(); + } else if (aggregationOperationContext instanceof TypeBasedAggregationOperationContext typeBasedAggregationOperationContext) { + this.inputType = typeBasedAggregationOperationContext.getType(); + } else { + this.inputType = null; + } + + this.aggregationOperationContext = Lazy.of(() -> aggregationOperationContext != null ? aggregationOperationContext + : aggregationUtil.createAggregationContext(aggregation, getInputType())); + this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext())); + } + + /** + * Creates new instance of {@link AggregationDefinition} extracting the input type from either the + * {@link org.springframework.data.mongodb.core.aggregation.Aggregation} in case of a {@link TypedAggregation} or + * the given {@literal aggregationOperationContext} if present.
          + * Creates a new {@link AggregationOperationContext} based on the {@link Aggregation} input type and the desired + * {@link AggregationOptions#getDomainTypeMapping() domain type mapping}.
          + * Pipelines are mapped on first access of {@link #getAggregationPipeline()} and cached for reuse. + * + * @param aggregation the source aggregation. + * @param inputType can be {@literal null}. + */ + AggregationDefinition(Aggregation aggregation, @Nullable Class inputType) { + + this.aggregation = aggregation; + + if (aggregation instanceof TypedAggregation typedAggregation) { + this.inputType = typedAggregation.getInputType(); + } else { + this.inputType = inputType; + } + + this.aggregationOperationContext = Lazy + .of(() -> aggregationUtil.createAggregationContext(aggregation, getInputType())); + this.pipeline = Lazy.of(() -> aggregationUtil.createPipeline(this.aggregation, getAggregationOperationContext())); + } + + /** + * Obtain the already mapped pipeline. + * + * @return never {@literal null}. + */ + List getAggregationPipeline() { + return pipeline.get(); + } + + /** + * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}. + * @see AggregationPipeline#isOutOrMerge() + */ + boolean isOutOrMerge() { + return aggregation.getPipeline().isOutOrMerge(); + } + + /** + * Obtain the {@link AggregationOperationContext} used for mapping the pipeline. + * + * @return never {@literal null}. + */ + AggregationOperationContext getAggregationOperationContext() { + return aggregationOperationContext.get(); + } + + /** + * @return the input type to map the pipeline against. Can be {@literal null}. + */ + @Nullable + Class getInputType() { + return inputType; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryResultConverter.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryResultConverter.java new file mode 100644 index 0000000000..ca93940a9c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/QueryResultConverter.java @@ -0,0 +1,85 @@ +/* + * Copyright 2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.bson.Document; + +/** + * Converter for MongoDB query results. + *

          + * This is a functional interface that allows for mapping a {@link Document} to a result type. + * {@link #mapDocument(Document, ConversionResultSupplier) row mapping} can obtain upstream a + * {@link ConversionResultSupplier upstream converter} to enrich the final result object. This is useful when e.g. + * wrapping result objects where the wrapper needs to obtain information from the actual {@link Document}. + * + * @param object type accepted by this converter. + * @param the returned result type. + * @author Mark Paluch + * @since 5.0 + */ +@FunctionalInterface +public interface QueryResultConverter { + + /** + * Returns a function that returns the materialized entity. + * + * @param the type of the input and output entity to the function. + * @return a function that returns the materialized entity. + */ + @SuppressWarnings("unchecked") + static QueryResultConverter entity() { + return (QueryResultConverter) EntityResultConverter.INSTANCE; + } + + /** + * Map a {@link Document} that is read from the MongoDB query/aggregation operation to a query result. + * + * @param document the raw document from the MongoDB query/aggregation result. + * @param reader reader object that supplies an upstream result from an earlier converter. + * @return the mapped result. + */ + R mapDocument(Document document, ConversionResultSupplier reader); + + /** + * Returns a composed function that first applies this function to its input, and then applies the {@code after} + * function to the result. If evaluation of either function throws an exception, it is relayed to the caller of the + * composed function. + * + * @param the type of output of the {@code after} function, and of the composed function. + * @param after the function to apply after this function is applied. + * @return a composed function that first applies this function and then applies the {@code after} function. 
+ */ + default QueryResultConverter andThen(QueryResultConverter after) { + return (row, reader) -> after.mapDocument(row, () -> mapDocument(row, reader)); + } + + /** + * A supplier that converts a {@link Document} into {@code T}. Allows for lazy reading of query results. + * + * @param type of the returned result. + */ + interface ConversionResultSupplier { + + /** + * Obtain the upstream conversion result. + * + * @return the upstream conversion result. + */ + T get(); + + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java index 2adb19bf85..99c94b19e4 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,7 @@ import reactor.core.publisher.Flux; import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.lang.Contract; /** * {@link ReactiveAggregationOperation} allows creation and execution of reactive MongoDB aggregation operations in a @@ -44,7 +45,7 @@ public interface ReactiveAggregationOperation { /** * Start creating an aggregation operation that returns results mapped to the given domain type.
          * Use {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} to specify a potentially different - * input type for he aggregation. + * input type for the aggregation. * * @param domainType must not be {@literal null}. * @return new instance of {@link ReactiveAggregation}. Never {@literal null}. @@ -73,6 +74,18 @@ interface AggregationOperationWithCollection { */ interface TerminatingAggregationOperation { + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingAggregationOperation}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingAggregationOperation map(QueryResultConverter converter); + + /** + * Apply pipeline operations as specified and stream all matching elements.
          * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java index bdcc96949a..fbaff2bc39 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveAggregationOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,7 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; +import org.jspecify.annotations.Nullable; import reactor.core.publisher.Flux; import org.springframework.data.mongodb.core.aggregation.Aggregation; @@ -45,64 +42,70 @@ class ReactiveAggregationOperationSupport implements ReactiveAggregationOperatio */ ReactiveAggregationOperationSupport(ReactiveMongoTemplate template) { - Assert.notNull(template, "Template must not be null!"); + Assert.notNull(template, "Template must not be null"); this.template = template; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ReactiveAggregation aggregateAndReturn(Class domainType) { - Assert.notNull(domainType, 
"DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveAggregationSupport<>(template, domainType, null, null); + return new ReactiveAggregationSupport<>(template, domainType, QueryResultConverter.entity(), null, null); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) - static class ReactiveAggregationSupport + static class ReactiveAggregationSupport implements AggregationOperationWithAggregation, ReactiveAggregation, TerminatingAggregationOperation { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - Aggregation aggregation; - String collection; + private final ReactiveMongoTemplate template; + private final Class domainType; + private final QueryResultConverter resultConverter; + private final @Nullable Aggregation aggregation; + private final @Nullable String collection; + + ReactiveAggregationSupport(ReactiveMongoTemplate template, Class domainType, + QueryResultConverter resultConverter, @Nullable Aggregation aggregation, + @Nullable String collection) { + + this.template = template; + this.domainType = domainType; + this.resultConverter = resultConverter; + this.aggregation = aggregation; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithCollection#inCollection(java.lang.String) - */ @Override public AggregationOperationWithAggregation inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); - return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); + return new ReactiveAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveAggregationOperation.AggregationOperationWithAggregation#by(org.springframework.data.mongodb.core.Aggregation) - */ @Override public TerminatingAggregationOperation by(Aggregation aggregation) { - Assert.notNull(aggregation, "Aggregation must not be null!"); + Assert.notNull(aggregation, "Aggregation must not be null"); + + return new ReactiveAggregationSupport<>(template, domainType, resultConverter, aggregation, collection); + } + + @Override + public TerminatingAggregationOperation map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); - return new ReactiveAggregationSupport<>(template, domainType, aggregation, collection); + return new ReactiveAggregationSupport<>(template, domainType, resultConverter.andThen(converter), aggregation, + collection); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation.TerminatingAggregationOperation#all() - */ @Override public Flux all() { - return template.aggregate(aggregation, getCollectionName(aggregation), domainType); + + Assert.notNull(aggregation, "Aggregation must be set first"); + + return template.doAggregate(aggregation, getCollectionName(aggregation), domainType, domainType, resultConverter); } private String getCollectionName(Aggregation aggregation) { @@ -111,16 +114,14 @@ private String getCollectionName(Aggregation aggregation) { return collection; } - if (aggregation instanceof TypedAggregation) { - - TypedAggregation typedAggregation = (TypedAggregation) aggregation; + if (aggregation instanceof TypedAggregation typedAggregation) { if (typedAggregation.getInputType() != null) { - return template.determineCollectionName(typedAggregation.getInputType()); + return template.getCollectionName(typedAggregation.getInputType()); } } - return template.determineCollectionName(domainType); + return template.getCollectionName(domainType); } } } diff --git 
a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java new file mode 100644 index 0000000000..7f88b63f28 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveBulkOperations.java @@ -0,0 +1,144 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Mono; + +import java.util.List; + +import org.springframework.data.domain.Sort; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.UpdateDefinition; + +import com.mongodb.bulk.BulkWriteResult; + +/** + * Bulk operations for insert/update/remove actions on a collection. Bulk operations are available since MongoDB 2.6 and + * make use of low level bulk commands on the protocol level. This interface defines a fluent API to add multiple single + * operations or list of similar operations in sequence which can then eventually be executed by calling + * {@link #execute()}. + * + *

          + * ReactiveMongoOperations ops = …;
          + *
          + * ops.bulkOps(BulkMode.UNORDERED, Person.class)
          + * 				.insert(newPerson)
          + * 				.updateOne(where("firstname").is("Joe"), Update.update("lastname", "Doe"))
          + * 				.execute();
          + * 
          + *

          + * Bulk operations are issued as one batch that pulls together all insert, update, and delete operations. Operations + * that require individual operation results such as optimistic locking (using {@code @Version}) are not supported and + * the version field remains not populated. + * + * @author Christoph Strobl + * @since 4.1 + */ +public interface ReactiveBulkOperations { + + /** + * Add a single insert to the bulk operation. + * + * @param documents the document to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(Object documents); + + /** + * Add a list of inserts to the bulk operation. + * + * @param documents List of documents to insert, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the insert added, will never be {@literal null}. + */ + ReactiveBulkOperations insert(List documents); + + /** + * Add a single update to the bulk operation. For the update request, only the first matching document is updated. + * + * @param query update criteria, must not be {@literal null}. The {@link Query} may define a {@link Query#with(Sort) + * sort order} to influence which document to update when potentially matching multiple candidates. + * @param update {@link UpdateDefinition} operation to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations updateOne(Query query, UpdateDefinition update); + + /** + * Add a single update to the bulk operation. For the update request, all matching documents are updated. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. 
+ */ + ReactiveBulkOperations updateMulti(Query query, UpdateDefinition update); + + /** + * Add a single upsert to the bulk operation. An upsert is an update if the set of matching documents is not empty, + * else an insert. + * + * @param query Update criteria. + * @param update Update operation to perform. + * @return the current {@link ReactiveBulkOperations} instance with the update added, will never be {@literal null}. + */ + ReactiveBulkOperations upsert(Query query, UpdateDefinition update); + + /** + * Add a single remove operation to the bulk operation. + * + * @param remove the {@link Query} to select the documents to be removed, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(Query remove); + + /** + * Add a list of remove operations to the bulk operation. + * + * @param removes the remove operations to perform, must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the removal added, will never be {@literal null}. + */ + ReactiveBulkOperations remove(List removes); + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. The {@link Query} may define a + * {@link Query#with(Sort) sort order} to influence which document to replace when potentially matching + * multiple candidates. + * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}. 
+ */ + default ReactiveBulkOperations replaceOne(Query query, Object replacement) { + return replaceOne(query, replacement, FindAndReplaceOptions.empty()); + } + + /** + * Add a single replace operation to the bulk operation. + * + * @param query Replace criteria. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence + * which document to replace when potentially matching multiple candidates. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. + * @return the current {@link ReactiveBulkOperations} instance with the replace added, will never be {@literal null}. + */ + ReactiveBulkOperations replaceOne(Query query, Object replacement, FindAndReplaceOptions options); + + /** + * Execute all bulk operations using the default write concern. + * + * @return a {@link Mono} emitting the result of the bulk operation providing counters for inserts/updates etc. + */ + Mono execute(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java new file mode 100644 index 0000000000..4f936e0ffa --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperation.java @@ -0,0 +1,200 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import java.time.Instant; +import java.util.function.Consumer; + +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; + +/** + * {@link ReactiveChangeStreamOperation} allows creation and execution of reactive MongoDB + * Change Stream operations in a fluent API style.
          + * The starting {@literal domainType} is used for mapping a potentially given + * {@link org.springframework.data.mongodb.core.aggregation.TypedAggregation} used for filtering. By default, the + * originating {@literal domainType} is also used for mapping back the result from the {@link org.bson.Document}. + * However, it is possible to define a different {@literal returnType} via {@code as}.
          + * The collection to operate on is optional in which case all collections within the actual database are watched, use + * {@literal watchCollection} to define a fixed collection. + * + *

          + *     
          + *         changeStream(Jedi.class)
          + *             .watchCollection("star-wars")
          + *             .filter(where("operationType").is("insert"))
          + *             .resumeAt(Instant.now())
          + *             .listen();
          + *     
          + * 
          + * + * @author Christoph Strobl + * @since 2.2 + */ +public interface ReactiveChangeStreamOperation { + + /** + * Start creating a change stream operation for the given {@literal domainType} watching all collections within the + * database.
          + * Consider limiting events by defining a {@link ChangeStreamWithCollection#watchCollection(String) collection} and/or + * {@link ChangeStreamWithFilterAndProjection#filter(CriteriaDefinition) filter}. + * + * @param domainType must not be {@literal null}. Use {@link org.bson.Document} to obtain raw elements. + * @return new instance of {@link ReactiveChangeStream}. Never {@literal null}. + * @throws IllegalArgumentException if domainType is {@literal null}. + */ + ReactiveChangeStream changeStream(Class domainType); + + /** + * Compose change stream execution by calling one of the terminating methods. + */ + interface TerminatingChangeStream { + + /** + * Start listening to changes. The stream will not be completed unless the {@link org.reactivestreams.Subscription} + * is {@link org.reactivestreams.Subscription#cancel() canceled}. + *
          + * However, the stream may become dead, or invalid, if all watched collections, databases are dropped. + */ + Flux> listen(); + } + + /** + * Collection override (optional). + */ + interface ChangeStreamWithCollection { + + /** + * Explicitly set the name of the collection to watch.
          + * Skip this step to watch all collections within the database. + * + * @param collection must not be {@literal null} nor {@literal empty}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if {@code collection} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection watchCollection(String collection); + + /** + * Set the collection to watch. Collection name is derived from the {@link Class entityClass}.
          + * Skip this step to watch all collections within the database. + * + * @param entityClass must not be {@literal null}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if {@code entityClass} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection watchCollection(Class entityClass); + } + + /** + * Provide a filter for limiting results (optional). + */ + interface ChangeStreamWithFilterAndProjection extends ResumingChangeStream, TerminatingChangeStream { + + /** + * Use an {@link Aggregation} to filter matching events. + * + * @param by must not be {@literal null}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if the given {@link Aggregation} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection filter(Aggregation by); + + /** + * Use a {@link CriteriaDefinition criteria} to filter matching events via an + * {@link org.springframework.data.mongodb.core.aggregation.MatchOperation}. + * + * @param by must not be {@literal null}. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if the given {@link CriteriaDefinition} is {@literal null}. + */ + ChangeStreamWithFilterAndProjection filter(CriteriaDefinition by); + + /** + * Define the target type fields should be mapped to. + * + * @param resultType must not be {@literal null}. + * @param result type. + * @return new instance of {@link ChangeStreamWithFilterAndProjection}. + * @throws IllegalArgumentException if resultType is {@literal null}. + */ + ChangeStreamWithFilterAndProjection as(Class resultType); + } + + /** + * Resume a change stream (optional). + */ + interface ResumingChangeStream extends TerminatingChangeStream { + + /** + * Resume the change stream at a given point. + * + * @param token an {@link Instant} or {@link BsonTimestamp} + * @return new instance of {@link TerminatingChangeStream}. 
+ * @see ChangeStreamOptionsBuilder#resumeAt(Instant) + * @see ChangeStreamOptionsBuilder#resumeAt(BsonTimestamp) + * @throws IllegalArgumentException if the given beacon is neither {@link Instant} nor {@link BsonTimestamp}. + */ + TerminatingChangeStream resumeAt(Object token); + + /** + * Resume the change stream after a given point. + * + * @param token a {@link BsonValue} + * @return new instance of {@link TerminatingChangeStream}. + * @see ChangeStreamOptionsBuilder#resumeAfter(BsonValue) + * @see ChangeStreamOptionsBuilder#resumeToken(BsonValue) + * @throws IllegalArgumentException if the given beacon is not a {@link BsonValue}. + */ + TerminatingChangeStream resumeAfter(Object token); + + /** + * Start the change stream after a given point. + * + * @param token a {@link BsonValue} + * @return new instance of {@link TerminatingChangeStream}. + * @see ChangeStreamOptionsBuilder#startAfter(BsonValue) + * @throws IllegalArgumentException if the given beacon is not a {@link BsonValue}. + */ + TerminatingChangeStream startAfter(Object token); + } + + /** + * Provide some options. + */ + interface ChangeStreamWithOptions { + + /** + * Provide some options via the callback by modifying the given {@link ChangeStreamOptionsBuilder}. Previously + * defined options like a {@link ResumingChangeStream#resumeAfter(Object) resumeToken} are carried over to the + * builder and can be overwritten via e.g. {@link ChangeStreamOptionsBuilder#resumeToken(BsonValue)}. + * + * @param optionsConsumer never {@literal null}. + * @return new instance of {@link ReactiveChangeStream}. + */ + ReactiveChangeStream withOptions(Consumer optionsConsumer); + } + + /** + * {@link ReactiveChangeStream} provides methods for constructing change stream operations in a fluent way. 
+ */ + interface ReactiveChangeStream extends ChangeStreamWithOptions, ChangeStreamWithCollection, + TerminatingChangeStream, ResumingChangeStream, ChangeStreamWithFilterAndProjection {} +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java new file mode 100644 index 0000000000..589f264f17 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveChangeStreamOperationSupport.java @@ -0,0 +1,187 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.springframework.data.mongodb.core; + +import reactor.core.publisher.Flux; + +import java.time.Instant; +import java.util.List; +import java.util.function.Consumer; + +import org.bson.BsonTimestamp; +import org.bson.BsonValue; +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.ChangeStreamOptions.ChangeStreamOptionsBuilder; +import org.springframework.data.mongodb.core.aggregation.Aggregation; +import org.springframework.data.mongodb.core.aggregation.MatchOperation; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; +import org.springframework.util.Assert; + +/** + * @author Christoph Strobl + * @since 2.2 + */ +class ReactiveChangeStreamOperationSupport implements ReactiveChangeStreamOperation { + + private final ReactiveMongoTemplate template; + + /** + * @param template must not be {@literal null}. + */ + ReactiveChangeStreamOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } + + @Override + public ReactiveChangeStream changeStream(Class domainType) { + + Assert.notNull(domainType, "DomainType must not be null"); + return new ReactiveChangeStreamSupport<>(template, domainType, domainType, null, null); + } + + static class ReactiveChangeStreamSupport + implements ReactiveChangeStream, ChangeStreamWithFilterAndProjection { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final @Nullable String collection; + private final @Nullable ChangeStreamOptions options; + + private ReactiveChangeStreamSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, + @Nullable String collection, @Nullable ChangeStreamOptions options) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.collection = collection; + this.options = options; + } + + @Override + public ChangeStreamWithFilterAndProjection 
watchCollection(String collection) { + + Assert.hasText(collection, "Collection name must not be null nor empty"); + + return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, options); + } + + @Override + public ChangeStreamWithFilterAndProjection watchCollection(Class entityClass) { + + Assert.notNull(entityClass, "Collection type not be null"); + + return watchCollection(template.getCollectionName(entityClass)); + } + + @Override + public TerminatingChangeStream resumeAt(Object token) { + + return withOptions(builder -> { + + if (token instanceof Instant instant) { + builder.resumeAt(instant); + } else if (token instanceof BsonTimestamp bsonTimestamp) { + builder.resumeAt(bsonTimestamp); + } + }); + } + + @Override + public TerminatingChangeStream resumeAfter(Object token) { + + Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue"); + + return withOptions(builder -> builder.resumeAfter((BsonValue) token)); + } + + @Override + public TerminatingChangeStream startAfter(Object token) { + + Assert.isInstanceOf(BsonValue.class, token, "Token must be a BsonValue"); + + return withOptions(builder -> builder.startAfter((BsonValue) token)); + } + + @Override + public ReactiveChangeStreamSupport withOptions(Consumer optionsConsumer) { + + ChangeStreamOptionsBuilder builder = initOptionsBuilder(); + optionsConsumer.accept(builder); + + return new ReactiveChangeStreamSupport<>(template, domainType, returnType, collection, builder.build()); + } + + @Override + public ChangeStreamWithFilterAndProjection as(Class resultType) { + + Assert.notNull(resultType, "ResultType must not be null"); + + return new ReactiveChangeStreamSupport<>(template, domainType, resultType, collection, options); + } + + @Override + public ChangeStreamWithFilterAndProjection filter(Aggregation filter) { + return withOptions(builder -> builder.filter(filter)); + } + + @Override + public ChangeStreamWithFilterAndProjection filter(CriteriaDefinition by) { + 
+ MatchOperation $match = Aggregation.match(by); + Aggregation aggregation = !Document.class.equals(domainType) ? Aggregation.newAggregation(domainType, $match) + : Aggregation.newAggregation($match); + return filter(aggregation); + } + + @Override + public Flux> listen() { + return template.changeStream(collection, options != null ? options : ChangeStreamOptions.empty(), returnType); + } + + private ChangeStreamOptionsBuilder initOptionsBuilder() { + + ChangeStreamOptionsBuilder builder = ChangeStreamOptions.builder(); + if (options == null) { + return builder; + } + + options.getFilter().ifPresent(it -> { + if (it instanceof Aggregation aggregation) { + builder.filter(aggregation); + } else { + builder.filter(((List) it).toArray(new Document[0])); + } + }); + options.getFullDocumentLookup().ifPresent(builder::fullDocumentLookup); + options.getFullDocumentBeforeChangeLookup().ifPresent(builder::fullDocumentBeforeChangeLookup); + options.getCollation().ifPresent(builder::collation); + + if (options.isResumeAfter()) { + options.getResumeToken().ifPresent(builder::resumeAfter); + options.getResumeBsonTimestamp().ifPresent(builder::resumeAfter); + } else if (options.isStartAfter()) { + options.getResumeToken().ifPresent(builder::startAfter); + } else { + options.getResumeTimestamp().ifPresent(builder::resumeAt); + options.getResumeBsonTimestamp().ifPresent(builder::resumeAt); + } + + return builder; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java index ac138e3577..dda6bf1b96 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveCollectionCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java index bb1c946383..470fd05ef7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveDatabaseCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java index 8dbc357d3b..eaa9da4a37 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,9 +18,14 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.GeoResult; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Contract; /** * {@link ReactiveFindOperation} allows creation and execution of reactive MongoDB find operations in a fluent API @@ -38,13 +43,14 @@ * query(Human.class) * .inCollection("star-wars") * .as(Jedi.class) - * .matching(query(where("firstname").is("luke"))) + * .matching(where("firstname").is("luke")) * .all(); * * * * @author Mark Paluch * @author Christoph Strobl + * @author Juergen Zimmermann * @since 2.0 */ public interface ReactiveFindOperation { @@ -61,7 +67,28 @@ public interface ReactiveFindOperation { /** * Compose find execution by calling one of the terminating methods. */ - interface TerminatingFind { + interface TerminatingFind extends TerminatingResults, TerminatingProjection { + + } + + /** + * Compose find execution by calling one of the terminating methods. + * + * @since 5.0 + */ + interface TerminatingResults { + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingResults}. 
+ * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingResults map(QueryResultConverter converter); /** * Get exactly zero or one result. @@ -86,7 +113,51 @@ interface TerminatingFind { Flux all(); /** - * Get the number of matching elements. + * Return a scroll of elements either starting or resuming at {@link ScrollPosition}. + *

          + * When using {@link KeysetScrollPosition}, make sure to use non-nullable + * {@link org.springframework.data.domain.Sort sort properties} as MongoDB does not support criteria to reconstruct + * a query result from absent document fields or {@literal null} values through {@code $gt/$lt} operators. + * + * @param scrollPosition the scroll position. + * @return a scroll of the resulting elements. + * @since 4.1 + * @see org.springframework.data.domain.OffsetScrollPosition + * @see org.springframework.data.domain.KeysetScrollPosition + */ + Mono> scroll(ScrollPosition scrollPosition); + + /** + * Get all matching elements using a {@link com.mongodb.CursorType#TailableAwait tailable cursor}. The stream will + * not be completed unless the {@link org.reactivestreams.Subscription} is + * {@link org.reactivestreams.Subscription#cancel() canceled}.
          + * However, the stream may become dead, or invalid, if either the query returns no match or the cursor returns the + * document at the "end" of the collection and then the application deletes that document.
          + * A stream that is no longer in use must be {@link reactor.core.Disposable#dispose()} disposed} otherwise the + * streams will linger and exhaust resources.
          + * NOTE: Requires a capped collection. + * + * @return the {@link Flux} emitting converted objects. + * @since 2.1 + */ + Flux tail(); + + } + + /** + * Compose find execution by calling one of the terminating methods. + * + * @since 5.0 + */ + interface TerminatingProjection { + + /** + * Get the number of matching elements.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but + * guarantees shard, session and transaction compliance. In case an inaccurate count satisfies the applications + * needs use {@link ReactiveMongoOperations#estimatedCount(String)} for empty queries instead. * * @return {@link Mono} emitting total number of matching elements. Never {@literal null}. */ @@ -105,6 +176,18 @@ interface TerminatingFind { */ interface TerminatingFindNear { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link ExecutableFindOperation.TerminatingFindNear}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingFindNear map(QueryResultConverter converter); + /** * Find all matching elements and return them as {@link org.springframework.data.geo.GeoResult}. * @@ -127,6 +210,18 @@ interface FindWithQuery extends TerminatingFind { */ TerminatingFind matching(Query query); + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingFind}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingFind matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } + /** * Set the filter query for the geoNear execution. * @@ -242,9 +337,21 @@ interface DistinctWithQuery extends DistinctWithProjection { * * @param query must not be {@literal null}. * @return new instance of {@link TerminatingDistinct}. 
- * @throws IllegalArgumentException if resultType is {@literal null}. + * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingDistinct matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingDistinct}. + * @throws IllegalArgumentException if criteria is {@literal null}. + * @since 3.0 + */ + default TerminatingDistinct matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java index 90aa9f2de2..38e32dc977 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFindOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,24 +15,22 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.dao.IncorrectResultSizeDataAccessException; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.Window; +import org.springframework.data.geo.GeoResult; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; import org.springframework.util.StringUtils; -import com.mongodb.reactivestreams.client.FindPublisher; - /** * Implementation of {@link ReactiveFindOperation}. 
* @@ -40,23 +38,22 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveFindOperationSupport implements ReactiveFindOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull ReactiveMongoTemplate template; + private final ReactiveMongoTemplate template; + + ReactiveFindOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation#query(java.lang.Class) - */ @Override public ReactiveFind query(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveFindSupport<>(template, domainType, domainType, null, ALL_QUERY); + return new ReactiveFindSupport<>(template, domainType, domainType, QueryResultConverter.entity(), null, ALL_QUERY); } /** @@ -65,85 +62,76 @@ public ReactiveFind query(Class domainType) { * @author Christoph Strobl * @since 2.0 */ - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) - static class ReactiveFindSupport + static class ReactiveFindSupport implements ReactiveFind, FindWithCollection, FindWithProjection, FindWithQuery { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - Class returnType; - String collection; - Query query; + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Class returnType; + private final QueryResultConverter resultConverter; + private final @Nullable String collection; + private final Query query; + + ReactiveFindSupport(ReactiveMongoTemplate template, Class domainType, Class returnType, + QueryResultConverter resultConverter, @Nullable String collection, + Query query) { + + this.template = template; + this.domainType = domainType; + this.returnType = returnType; + this.resultConverter = resultConverter; + this.collection = collection; + 
this.query = query; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithCollection#inCollection(java.lang.String) - */ @Override public FindWithProjection inCollection(String collection) { - Assert.hasText(collection, "Collection name must not be null nor empty!"); + Assert.hasText(collection, "Collection name must not be null nor empty"); - return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + return new ReactiveFindSupport<>(template, domainType, returnType, resultConverter, collection, query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithProjection#as(java.lang.Class) - */ @Override public FindWithQuery as(Class returnType) { - Assert.notNull(returnType, "ReturnType must not be null!"); + Assert.notNull(returnType, "ReturnType must not be null"); - return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + return new ReactiveFindSupport<>(template, domainType, returnType, QueryResultConverter.entity(), collection, + query); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override public TerminatingFind matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new ReactiveFindSupport<>(template, domainType, returnType, collection, query); + return new ReactiveFindSupport<>(template, domainType, returnType, resultConverter, collection, query); + } + + @Override + public TerminatingResults map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new ReactiveFindSupport<>(template, domainType, returnType, this.resultConverter.andThen(converter), + collection, query); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#first() - */ @Override public Mono first() { FindPublisherPreparer preparer = getCursorPreparer(query); - Flux result = doFind(new FindPublisherPreparer() { - @Override - public FindPublisher prepare(FindPublisher publisher) { - return preparer.prepare(publisher).limit(1); - } - }); + Flux result = doFind(publisher -> preparer.prepare(publisher).limit(1)); return result.next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#one() - */ @Override public Mono one() { FindPublisherPreparer preparer = getCursorPreparer(query); - Flux result = doFind(new FindPublisherPreparer() { - @Override - public FindPublisher prepare(FindPublisher publisher) { - return preparer.prepare(publisher).limit(2); - } - }); + Flux result = doFind(publisher -> preparer.prepare(publisher).limit(2)); return result.collectList().flatMap(it -> { @@ -153,57 +141,48 @@ public FindPublisher prepare(FindPublisher publisher) { if (it.size() > 1) { return Mono.error( - new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result.", 1)); + new IncorrectResultSizeDataAccessException("Query " + asString() + " returned non unique result", 1)); } return Mono.just(it.get(0)); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#all() - */ @Override public Flux all() { return doFind(null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindWithQuery#near(org.springframework.data.mongodb.core.query.NearQuery) - */ + @Override + public Mono> scroll(ScrollPosition scrollPosition) { + return template.doScroll(query.with(scrollPosition), domainType, returnType, resultConverter, + getCollectionName()); + } + + @Override + public Flux tail() { + return doFind(template.new TailingQueryFindPublisherPreparer(query, domainType)); + } + 
@Override public TerminatingFindNear near(NearQuery nearQuery) { - return () -> template.geoNear(nearQuery, domainType, getCollectionName(), returnType); + return new TerminatingFindNearSupport<>(nearQuery, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#count() - */ @Override public Mono count() { return template.count(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.TerminatingFind#exists() - */ @Override public Mono exists() { return template.exists(query, domainType, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.FindDistinct#distinct(java.lang.String) - */ @Override public TerminatingDistinct distinct(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new DistinctOperationSupport<>(this, field); } @@ -213,15 +192,16 @@ private Flux doFind(@Nullable FindPublisherPreparer preparer) { Document queryObject = query.getQueryObject(); Document fieldsObject = query.getFieldsObject(); - return template.doFind(getCollectionName(), queryObject, fieldsObject, domainType, returnType, + return template.doFind(getCollectionName(), ReactiveCollectionPreparerDelegate.of(query), queryObject, + fieldsObject, domainType, returnType, resultConverter, preparer != null ? preparer : getCursorPreparer(query)); } - @SuppressWarnings("unchecked") + @SuppressWarnings({ "unchecked", "rawtypes" }) private Flux doFindDistinct(String field) { return template.findDistinct(query, field, getCollectionName(), domainType, - returnType == domainType ? (Class) Object.class : returnType); + returnType == domainType ? 
(Class) Object.class : returnType); } private FindPublisherPreparer getCursorPreparer(Query query) { @@ -229,17 +209,43 @@ private FindPublisherPreparer getCursorPreparer(Query query) { } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } private String asString() { return SerializationUtils.serializeToJsonSafely(query); } + class TerminatingFindNearSupport implements TerminatingFindNear { + + private final NearQuery nearQuery; + private final QueryResultConverter resultConverter; + + public TerminatingFindNearSupport(NearQuery nearQuery, + QueryResultConverter resultConverter) { + this.nearQuery = nearQuery; + this.resultConverter = resultConverter; + } + + @Override + public TerminatingFindNear map(QueryResultConverter converter) { + + Assert.notNull(converter, "QueryResultConverter must not be null"); + + return new TerminatingFindNearSupport<>(nearQuery, this.resultConverter.andThen(converter)); + } + + @Override + public Flux> all() { + return template.doGeoNear(nearQuery, domainType, getCollectionName(), returnType, resultConverter); + } + } + /** * @author Christoph Strobl * @since 2.1 */ + @SuppressWarnings({ "unchecked", "rawtypes" }) static class DistinctOperationSupport implements TerminatingDistinct { private final String field; @@ -251,35 +257,22 @@ public DistinctOperationSupport(ReactiveFindSupport delegate, String field) { this.field = field; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithProjection#as(java.lang.Class) - */ @Override public TerminatingDistinct as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.as(resultType), field); } - /* - * 
(non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation.DistinctWithQuery#matching(org.springframework.data.mongodb.core.query.Query) - */ @Override - @SuppressWarnings("unchecked") public TerminatingDistinct matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.matching(query), field); + return new DistinctOperationSupport<>((ReactiveFindSupport) delegate.matching(query), field); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core..ReactiveFindOperation.TerminatingDistinct#all() - */ @Override public Flux all() { return delegate.doFindDistinct(field); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java index 15936c65d1..30d61771df 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveFluentMongoOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -23,4 +23,4 @@ * @since 2.0 */ public interface ReactiveFluentMongoOperations extends ReactiveFindOperation, ReactiveInsertOperation, - ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation {} + ReactiveUpdateOperation, ReactiveRemoveOperation, ReactiveAggregationOperation, ReactiveMapReduceOperation, ReactiveChangeStreamOperation {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java index 946a82ae02..ff3b690639 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java index e6daaedbe0..9d424c2446 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveInsertOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,7 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; +import org.jspecify.annotations.Nullable; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @@ -34,69 +31,61 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveInsertOperationSupport implements ReactiveInsertOperation { - private final @NonNull ReactiveMongoTemplate template; + private final ReactiveMongoTemplate template; + + ReactiveInsertOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation#insert(java.lang.Class) - */ @Override public ReactiveInsert insert(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); return new ReactiveInsertSupport<>(template, domainType, null); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) static class ReactiveInsertSupport implements ReactiveInsert { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - String collection; + private final ReactiveMongoTemplate template; + private final Class domainType; + private final @Nullable String collection; + + ReactiveInsertSupport(ReactiveMongoTemplate template, Class domainType, @Nullable String collection) { + + this.template = template; + this.domainType = domainType; + this.collection = collection; + } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveInsertOperation.TerminatingInsert#one(java.lang.Object) - */ @Override public Mono one(T object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); return template.insert(object, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.TerminatingInsert#all(java.util.Collection) - */ @Override public Flux all(Collection objects) { - Assert.notNull(objects, "Objects must not be null!"); + Assert.notNull(objects, "Objects must not be null"); return template.insert(objects, getCollectionName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation.InsertWithCollection#inCollection(java.lang.String) - */ @Override public ReactiveInsert inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty."); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveInsertSupport<>(template, domainType, collection); } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java index fca611f741..798b1ca7dd 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import org.springframework.data.mongodb.core.ExecutableFindOperation.ExecutableFind; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; /** @@ -30,7 +31,7 @@ * The collection to operate on is by default derived from the initial {@literal domainType} and can be defined there * via {@link org.springframework.data.mongodb.core.mapping.Document}. Using {@code inCollection} allows to override the * collection name for the execution. - * + * *
            *     
            *         mapReduce(Human.class)
          @@ -146,6 +147,18 @@ interface MapReduceWithQuery extends TerminatingMapReduce {
           		 * @throws IllegalArgumentException if query is {@literal null}.
           		 */
           		TerminatingMapReduce matching(Query query);
          +
          +		/**
          +		 * Set the filter {@link CriteriaDefinition criteria} to be used.
          +		 *
          +		 * @param criteria must not be {@literal null}.
          +		 * @return new instance of {@link TerminatingMapReduce}.
+		 * @throws IllegalArgumentException if criteria is {@literal null}.
          +		 * @since 3.0
          +		 */
          +		default TerminatingMapReduce matching(CriteriaDefinition criteria) {
          +			return matching(Query.query(criteria));
          +		}
           	}
           
           	/**
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java
          index 036424125b..4e3379bad0 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMapReduceOperationSupport.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2018 the original author or authors.
          + * Copyright 2018-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -15,13 +15,11 @@
            */
           package org.springframework.data.mongodb.core;
           
          -import lombok.NonNull;
          -import lombok.RequiredArgsConstructor;
           import reactor.core.publisher.Flux;
           
          +import org.jspecify.annotations.Nullable;
           import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
           import org.springframework.data.mongodb.core.query.Query;
          -import org.springframework.lang.Nullable;
           import org.springframework.util.Assert;
           import org.springframework.util.StringUtils;
           
          @@ -31,12 +29,15 @@
            * @author Christoph Strobl
            * @since 2.1
            */
          -@RequiredArgsConstructor
           class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation {
           
           	private static final Query ALL_QUERY = new Query();
           
          -	private final @NonNull ReactiveMongoTemplate template;
          +	private final ReactiveMongoTemplate template;
          +
          +	ReactiveMapReduceOperationSupport(ReactiveMongoTemplate template) {
          +		this.template = template;
          +	}
           
           	/*
           	 * (non-Javascript)
          @@ -45,7 +46,7 @@ class ReactiveMapReduceOperationSupport implements ReactiveMapReduceOperation {
           	@Override
           	public  ReactiveMapReduceSupport mapReduce(Class domainType) {
           
          -		Assert.notNull(domainType, "DomainType must not be null!");
          +		Assert.notNull(domainType, "DomainType must not be null");
           
           		return new ReactiveMapReduceSupport<>(template, domainType, domainType, null, ALL_QUERY, null, null, null);
           	}
          @@ -88,8 +89,11 @@ static class ReactiveMapReduceSupport
           		@Override
           		public Flux all() {
           
          +			Assert.notNull(mapFunction, "MapFunction must be set first");
          +			Assert.notNull(reduceFunction, "ReduceFunction must be set first");
          +
           			return template.mapReduce(query, domainType, getCollectionName(), returnType, mapFunction, reduceFunction,
          -					options);
          +					options != null ? options : MapReduceOptions.options());
           		}
           
           		/*
          @@ -99,7 +103,7 @@ public Flux all() {
           		@Override
           		public MapReduceWithProjection inCollection(String collection) {
           
          -			Assert.hasText(collection, "Collection name must not be null nor empty!");
          +			Assert.hasText(collection, "Collection name must not be null nor empty");
           
           			return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
           					reduceFunction, options);
          @@ -112,7 +116,7 @@ public MapReduceWithProjection inCollection(String collection) {
           		@Override
           		public TerminatingMapReduce matching(Query query) {
           
          -			Assert.notNull(query, "Query must not be null!");
          +			Assert.notNull(query, "Query must not be null");
           
           			return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
           					reduceFunction, options);
          @@ -125,7 +129,7 @@ public TerminatingMapReduce matching(Query query) {
           		@Override
           		public  MapReduceWithQuery as(Class resultType) {
           
          -			Assert.notNull(resultType, "ResultType must not be null!");
          +			Assert.notNull(resultType, "ResultType must not be null");
           
           			return new ReactiveMapReduceSupport<>(template, domainType, resultType, collection, query, mapFunction,
           					reduceFunction, options);
          @@ -138,7 +142,7 @@ public  MapReduceWithQuery as(Class resultType) {
           		@Override
           		public ReactiveMapReduce with(MapReduceOptions options) {
           
          -			Assert.notNull(options, "Options must not be null! Please consider empty MapReduceOptions#options() instead.");
          +			Assert.notNull(options, "Options must not be null Please consider empty MapReduceOptions#options() instead");
           
           			return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
           					reduceFunction, options);
          @@ -151,7 +155,7 @@ public ReactiveMapReduce with(MapReduceOptions options) {
           		@Override
           		public MapReduceWithReduceFunction map(String mapFunction) {
           
          -			Assert.hasText(mapFunction, "MapFunction name must not be null nor empty!");
          +			Assert.hasText(mapFunction, "MapFunction name must not be null nor empty");
           
           			return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
           					reduceFunction, options);
          @@ -164,14 +168,14 @@ public MapReduceWithReduceFunction map(String mapFunction) {
           		@Override
           		public ReactiveMapReduce reduce(String reduceFunction) {
           
          -			Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty!");
          +			Assert.hasText(reduceFunction, "ReduceFunction name must not be null nor empty");
           
           			return new ReactiveMapReduceSupport<>(template, domainType, returnType, collection, query, mapFunction,
           					reduceFunction, options);
           		}
           
           		private String getCollectionName() {
          -			return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType);
          +			return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType);
           		}
           	}
           }
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java
          index 4d41a5fcb0..89caf3273c 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientFactoryBean.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2016-2018 the original author or authors.
          + * Copyright 2016-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -16,13 +16,13 @@
           
           package org.springframework.data.mongodb.core;
           
          +import org.jspecify.annotations.Nullable;
           import org.springframework.beans.factory.config.AbstractFactoryBean;
           import org.springframework.dao.DataAccessException;
           import org.springframework.dao.support.PersistenceExceptionTranslator;
          -import org.springframework.lang.Nullable;
           import org.springframework.util.StringUtils;
           
          -import com.mongodb.async.client.MongoClientSettings;
          +import com.mongodb.MongoClientSettings;
           import com.mongodb.reactivestreams.client.MongoClient;
           import com.mongodb.reactivestreams.client.MongoClients;
           
          @@ -36,13 +36,11 @@
           public class ReactiveMongoClientFactoryBean extends AbstractFactoryBean
           		implements PersistenceExceptionTranslator {
           
          -	private static final PersistenceExceptionTranslator DEFAULT_EXCEPTION_TRANSLATOR = new MongoExceptionTranslator();
          -
           	private @Nullable String connectionString;
           	private @Nullable String host;
           	private @Nullable Integer port;
           	private @Nullable MongoClientSettings mongoClientSettings;
          -	private PersistenceExceptionTranslator exceptionTranslator = DEFAULT_EXCEPTION_TRANSLATOR;
          +	private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR;
           
           	/**
           	 * Configures the host to connect to.
          @@ -86,7 +84,13 @@ public void setMongoClientSettings(@Nullable MongoClientSettings mongoClientSett
           	 * @param exceptionTranslator
           	 */
           	public void setExceptionTranslator(@Nullable PersistenceExceptionTranslator exceptionTranslator) {
          -		this.exceptionTranslator = exceptionTranslator == null ? DEFAULT_EXCEPTION_TRANSLATOR : exceptionTranslator;
          +		this.exceptionTranslator = exceptionTranslator == null ? MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR
          +				: exceptionTranslator;
          +	}
          +
          +	@Override
          +	public @Nullable DataAccessException translateExceptionIfPossible(RuntimeException ex) {
          +		return exceptionTranslator.translateExceptionIfPossible(ex);
           	}
           
           	@Override
          @@ -115,16 +119,14 @@ protected MongoClient createInstance() throws Exception {
           		}
           
           		throw new IllegalStateException(
          -				"Cannot create MongoClients. One of the following is required: mongoClientSettings, connectionString or host/port");
          +				"Cannot create MongoClients; One of the following is required: mongoClientSettings, connectionString or host/port");
           	}
           
           	@Override
           	protected void destroyInstance(@Nullable MongoClient instance) throws Exception {
          -		instance.close();
          +		if (instance != null) {
          +			instance.close();
          +		}
           	}
           
          -	@Override
          -	public DataAccessException translateExceptionIfPossible(RuntimeException ex) {
          -		return exceptionTranslator.translateExceptionIfPossible(ex);
          -	}
           }
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java
          deleted file mode 100644
          index d109fed02d..0000000000
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoClientSettingsFactoryBean.java
          +++ /dev/null
          @@ -1,206 +0,0 @@
          -/*
          - * Copyright 2016-2018 the original author or authors.
          - *
          - * Licensed under the Apache License, Version 2.0 (the "License");
          - * you may not use this file except in compliance with the License.
          - * You may obtain a copy of the License at
          - *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          - *
          - * Unless required by applicable law or agreed to in writing, software
          - * distributed under the License is distributed on an "AS IS" BASIS,
          - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
          - * See the License for the specific language governing permissions and
          - * limitations under the License.
          - */
          -package org.springframework.data.mongodb.core;
          -
          -import java.util.ArrayList;
          -import java.util.List;
          -
          -import org.bson.codecs.configuration.CodecRegistry;
          -import org.springframework.beans.factory.config.AbstractFactoryBean;
          -import org.springframework.util.Assert;
          -
          -import com.mongodb.MongoCredential;
          -import com.mongodb.ReadConcern;
          -import com.mongodb.ReadPreference;
          -import com.mongodb.WriteConcern;
          -import com.mongodb.async.client.MongoClientSettings;
          -import com.mongodb.connection.ClusterSettings;
          -import com.mongodb.connection.ConnectionPoolSettings;
          -import com.mongodb.connection.ServerSettings;
          -import com.mongodb.connection.SocketSettings;
          -import com.mongodb.connection.SslSettings;
          -import com.mongodb.connection.StreamFactoryFactory;
          -
          -/**
          - * A factory bean for construction of a {@link MongoClientSettings} instance to be used with the async MongoDB driver.
          - *
          - * @author Mark Paluch
          - * @since 2.0
          - */
          -public class ReactiveMongoClientSettingsFactoryBean extends AbstractFactoryBean {
          -
          -	private static final MongoClientSettings DEFAULT_MONGO_SETTINGS = MongoClientSettings.builder().build();
          -
          -	private ReadPreference readPreference = DEFAULT_MONGO_SETTINGS.getReadPreference();
          -	private WriteConcern writeConcern = DEFAULT_MONGO_SETTINGS.getWriteConcern();
          -	private ReadConcern readConcern = DEFAULT_MONGO_SETTINGS.getReadConcern();
          -	private List credentialList = new ArrayList<>();
          -	private StreamFactoryFactory streamFactoryFactory = DEFAULT_MONGO_SETTINGS.getStreamFactoryFactory();
          -	private CodecRegistry codecRegistry = DEFAULT_MONGO_SETTINGS.getCodecRegistry();
          -	private ClusterSettings clusterSettings = DEFAULT_MONGO_SETTINGS.getClusterSettings();
          -	private SocketSettings socketSettings = DEFAULT_MONGO_SETTINGS.getSocketSettings();
          -	private SocketSettings heartbeatSocketSettings = DEFAULT_MONGO_SETTINGS.getHeartbeatSocketSettings();
          -	private ConnectionPoolSettings connectionPoolSettings = DEFAULT_MONGO_SETTINGS.getConnectionPoolSettings();
          -	private ServerSettings serverSettings = DEFAULT_MONGO_SETTINGS.getServerSettings();
          -	private SslSettings sslSettings = DEFAULT_MONGO_SETTINGS.getSslSettings();
          -
          -	/**
          -	 * Set the {@link ReadPreference}.
          -	 *
          -	 * @param readPreference
          -	 */
          -	public void setReadPreference(ReadPreference readPreference) {
          -		this.readPreference = readPreference;
          -	}
          -
          -	/**
          -	 * Set the {@link WriteConcern}.
          -	 *
          -	 * @param writeConcern
          -	 */
          -	public void setWriteConcern(WriteConcern writeConcern) {
          -		this.writeConcern = writeConcern;
          -	}
          -
          -	/**
          -	 * Set the {@link ReadConcern}.
          -	 *
          -	 * @param readConcern
          -	 */
          -	public void setReadConcern(ReadConcern readConcern) {
          -		this.readConcern = readConcern;
          -	}
          -
          -	/**
          -	 * Set the List of {@link MongoCredential}s.
          -	 *
          -	 * @param credentialList must not be {@literal null}.
          -	 */
          -	public void setCredentialList(List credentialList) {
          -
          -		Assert.notNull(credentialList, "CredendialList must not be null!");
          -
          -		this.credentialList.addAll(credentialList);
          -	}
          -
          -	/**
          -	 * Adds the {@link MongoCredential} to the list of credentials.
          -	 *
          -	 * @param mongoCredential must not be {@literal null}.
          -	 */
          -	public void addMongoCredential(MongoCredential mongoCredential) {
          -
          -		Assert.notNull(mongoCredential, "MongoCredential must not be null!");
          -
          -		this.credentialList.add(mongoCredential);
          -	}
          -
          -	/**
          -	 * Set the {@link StreamFactoryFactory}.
          -	 *
          -	 * @param streamFactoryFactory
          -	 */
          -	public void setStreamFactoryFactory(StreamFactoryFactory streamFactoryFactory) {
          -		this.streamFactoryFactory = streamFactoryFactory;
          -	}
          -
          -	/**
          -	 * Set the {@link CodecRegistry}.
          -	 *
          -	 * @param codecRegistry
          -	 */
          -	public void setCodecRegistry(CodecRegistry codecRegistry) {
          -		this.codecRegistry = codecRegistry;
          -	}
          -
          -	/**
          -	 * Set the {@link ClusterSettings}.
          -	 *
          -	 * @param clusterSettings
          -	 */
          -	public void setClusterSettings(ClusterSettings clusterSettings) {
          -		this.clusterSettings = clusterSettings;
          -	}
          -
          -	/**
          -	 * Set the {@link SocketSettings}.
          -	 *
          -	 * @param socketSettings
          -	 */
          -	public void setSocketSettings(SocketSettings socketSettings) {
          -		this.socketSettings = socketSettings;
          -	}
          -
          -	/**
          -	 * Set the heartbeat {@link SocketSettings}.
          -	 *
          -	 * @param heartbeatSocketSettings
          -	 */
          -	public void setHeartbeatSocketSettings(SocketSettings heartbeatSocketSettings) {
          -		this.heartbeatSocketSettings = heartbeatSocketSettings;
          -	}
          -
          -	/**
          -	 * Set the {@link ConnectionPoolSettings}.
          -	 *
          -	 * @param connectionPoolSettings
          -	 */
          -	public void setConnectionPoolSettings(ConnectionPoolSettings connectionPoolSettings) {
          -		this.connectionPoolSettings = connectionPoolSettings;
          -	}
          -
          -	/**
          -	 * Set the {@link ServerSettings}.
          -	 *
          -	 * @param serverSettings
          -	 */
          -	public void setServerSettings(ServerSettings serverSettings) {
          -		this.serverSettings = serverSettings;
          -	}
          -
          -	/**
          -	 * Set the {@link SslSettings}.
          -	 *
          -	 * @param sslSettings
          -	 */
          -	public void setSslSettings(SslSettings sslSettings) {
          -		this.sslSettings = sslSettings;
          -	}
          -
          -	@Override
          -	public Class getObjectType() {
          -		return MongoClientSettings.class;
          -	}
          -
          -	@Override
          -	protected MongoClientSettings createInstance() throws Exception {
          -
          -		return MongoClientSettings.builder() //
          -				.readPreference(readPreference) //
          -				.writeConcern(writeConcern) //
          -				.readConcern(readConcern) //
          -				.credentialList(credentialList) //
          -				.streamFactoryFactory(streamFactoryFactory) //
          -				.codecRegistry(codecRegistry) //
          -				.clusterSettings(clusterSettings) //
          -				.socketSettings(socketSettings) //
          -				.heartbeatSocketSettings(heartbeatSocketSettings) //
          -				.connectionPoolSettings(connectionPoolSettings) //
          -				.serverSettings(serverSettings) //
          -				.sslSettings(sslSettings) //
          -				.build();
          -	}
          -}
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java
          index edb9144a04..8697ce4dcd 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoContext.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2018 the original author or authors.
          + * Copyright 2018-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -15,11 +15,15 @@
            */
           package org.springframework.data.mongodb.core;
           
          -import org.reactivestreams.Publisher;
          -import org.springframework.util.Assert;
           import reactor.core.publisher.Mono;
           import reactor.util.context.Context;
           
          +import java.util.function.Function;
          +
          +import org.reactivestreams.Publisher;
          +
          +import org.springframework.util.Assert;
          +
           import com.mongodb.reactivestreams.client.ClientSession;
           
           /**
          @@ -29,7 +33,7 @@
            * @author Christoph Strobl
            * @author Mark Paluch
            * @since 2.1
          - * @see Mono#subscriberContext()
          + * @see Mono#deferContextual(Function)
            * @see Context
            */
           public class ReactiveMongoContext {
          @@ -46,8 +50,14 @@ public class ReactiveMongoContext {
           	 */
           	public static Mono getSession() {
           
          -		return Mono.subscriberContext().filter(ctx -> ctx.hasKey(SESSION_KEY))
          -				.flatMap(ctx -> ctx.> get(SESSION_KEY));
          +		return Mono.deferContextual(ctx -> {
          +
          +			if (ctx.hasKey(SESSION_KEY)) {
          +				return ctx.> get(SESSION_KEY);
          +			}
          +
          +			return Mono.empty();
          +		});
           	}
           
           	/**
          @@ -60,8 +70,8 @@ public static Mono getSession() {
           	 */
           	public static Context setSession(Context context, Publisher session) {
           
          -		Assert.notNull(context, "Context must not be null!");
          -		Assert.notNull(session, "Session publisher must not be null!");
          +		Assert.notNull(context, "Context must not be null");
          +		Assert.notNull(session, "Session publisher must not be null");
           
           		return context.put(SESSION_KEY, Mono.from(session));
           	}
          diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java
          index e995c4ba63..14f6ee2631 100644
          --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java
          +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoOperations.java
          @@ -1,11 +1,11 @@
           /*
          - * Copyright 2016-2018 the original author or authors.
          + * Copyright 2016-2025 the original author or authors.
            *
            * Licensed under the Apache License, Version 2.0 (the "License");
            * you may not use this file except in compliance with the License.
            * You may obtain a copy of the License at
            *
          - *      http://www.apache.org/licenses/LICENSE-2.0
          + *      https://www.apache.org/licenses/LICENSE-2.0
            *
            * Unless required by applicable law or agreed to in writing, software
            * distributed under the License is distributed on an "AS IS" BASIS,
          @@ -23,23 +23,32 @@
           import java.util.function.Supplier;
           
           import org.bson.Document;
          +import org.jspecify.annotations.Nullable;
           import org.reactivestreams.Publisher;
           import org.reactivestreams.Subscription;
          +import org.springframework.data.domain.KeysetScrollPosition;
          +import org.springframework.data.domain.Sort;
          +import org.springframework.data.domain.Window;
           import org.springframework.data.geo.GeoResult;
           import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory;
          +import org.springframework.data.mongodb.core.BulkOperations.BulkMode;
           import org.springframework.data.mongodb.core.aggregation.Aggregation;
          +import org.springframework.data.mongodb.core.aggregation.AggregationOperation;
           import org.springframework.data.mongodb.core.aggregation.AggregationOptions;
          +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline;
          +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate;
           import org.springframework.data.mongodb.core.aggregation.TypedAggregation;
           import org.springframework.data.mongodb.core.convert.MappingMongoConverter;
           import org.springframework.data.mongodb.core.convert.MongoConverter;
           import org.springframework.data.mongodb.core.index.ReactiveIndexOperations;
           import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
           import org.springframework.data.mongodb.core.query.BasicQuery;
          +import org.springframework.data.mongodb.core.query.Collation;
           import org.springframework.data.mongodb.core.query.Criteria;
           import org.springframework.data.mongodb.core.query.NearQuery;
           import org.springframework.data.mongodb.core.query.Query;
           import org.springframework.data.mongodb.core.query.Update;
          -import org.springframework.lang.Nullable;
          +import org.springframework.data.mongodb.core.query.UpdateDefinition;
           import org.springframework.util.Assert;
           import org.springframework.util.ClassUtils;
           
          @@ -55,14 +64,18 @@
            * 

          * Implemented by {@link ReactiveMongoTemplate}. Not often used but a useful option for extensibility and testability * (as it can be easily mocked, stubbed, or be the target of a JDK proxy). Command execution using - * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}. + * {@link ReactiveMongoOperations} is deferred until subscriber subscribes to the {@link Publisher}.
          + * NOTE: Some operations cannot be executed within a MongoDB transaction. Please refer to the MongoDB + * specific documentation to learn more about Multi + * Document Transactions. * * @author Mark Paluch * @author Christoph Strobl + * @author Mathieu Ouellet * @since 2.0 * @see Flux * @see Mono - * @see Project Reactor + * @see Project Reactor */ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { @@ -83,7 +96,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { ReactiveIndexOperations indexOps(Class entityClass); /** - * Execute the a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the + * Execute a MongoDB command expressed as a JSON string. This will call the method JSON.parse that is part of the * MongoDB driver to convert the JSON string to a Document. Any errors that result from executing this command will be * converted into Spring's DAO exception hierarchy. * @@ -112,8 +125,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Mono executeCommand(Document command, @Nullable ReadPreference readPreference); /** - * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary. - *

          + * Executes a {@link ReactiveDatabaseCallback} translating any exceptions as necessary.
          * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param action callback object that specifies the MongoDB actions to perform on the passed in DB instance. Must not @@ -124,8 +136,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Flux execute(ReactiveDatabaseCallback action); /** - * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class. - *

          + * Executes the given {@link ReactiveCollectionCallback} on the entity collection of the specified class.
          * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param entityClass class that determines the collection to use. Must not be {@literal null}. @@ -136,8 +147,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { Flux execute(Class entityClass, ReactiveCollectionCallback action); /** - * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name. - *

          + * Executes the given {@link ReactiveCollectionCallback} on the collection of the given name.
          * Allows for returning a result object, that is a domain object or a collection of domain objects. * * @param collectionName the name of the collection that specifies which {@link MongoCollection} instance will be @@ -150,8 +160,7 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding the {@link ClientSession} - * provided by the given {@link Supplier} to each and every command issued against MongoDB. - *

          + * provided by the given {@link Supplier} to each and every command issued against MongoDB.
          * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -162,15 +171,14 @@ public interface ReactiveMongoOperations extends ReactiveFluentMongoOperations { */ default ReactiveSessionScoped withSession(Supplier sessionProvider) { - Assert.notNull(sessionProvider, "SessionProvider must not be null!"); + Assert.notNull(sessionProvider, "SessionProvider must not be null"); return withSession(Mono.fromSupplier(sessionProvider)); } /** * Obtain a {@link ClientSession session} bound instance of {@link SessionScoped} binding a new {@link ClientSession} - * with given {@literal sessionOptions} to each and every command issued against MongoDB. - *

          + * with given {@literal sessionOptions} to each and every command issued against MongoDB.
          * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -184,7 +192,7 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide /** * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped} binding the * {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against MongoDB. - *

          + *
          * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. Use * {@link ReactiveSessionScoped#execute(ReactiveSessionCallback, Consumer)} to provide a hook for processing the * {@link ClientSession} when done. @@ -196,44 +204,14 @@ default ReactiveSessionScoped withSession(Supplier sessionProvide ReactiveSessionScoped withSession(Publisher sessionProvider); /** - * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}. - *

          + * Obtain a {@link ClientSession} bound instance of {@link ReactiveMongoOperations}.
          * Note: It is up to the caller to manage the {@link ClientSession} lifecycle. * - * @param session must not be {@literal null}. * @return {@link ClientSession} bound instance of {@link ReactiveMongoOperations}. * @since 2.1 */ ReactiveMongoOperations withSession(ClientSession session); - /** - * Initiate a new {@link ClientSession} and obtain a {@link ClientSession session} bound instance of - * {@link ReactiveSessionScoped}. Starts the transaction and adds the {@link ClientSession} to each and every command - * issued against MongoDB. - *

          - * Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction - * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are - * {@link ClientSession#abortTransaction() rolled back} upon errors. - * - * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. - */ - ReactiveSessionScoped inTransaction(); - - /** - * Obtain a {@link ClientSession session} bound instance of {@link ReactiveSessionScoped}, start the transaction and - * bind the {@link ClientSession} provided by the given {@link Publisher} to each and every command issued against - * MongoDB. - *

          - * Each {@link ReactiveSessionScoped#execute(ReactiveSessionCallback) execution} initiates a new managed transaction - * that is {@link ClientSession#commitTransaction() committed} on success. Transactions are - * {@link ClientSession#abortTransaction() rolled back} upon errors. - * - * @param sessionProvider must not be {@literal null}. - * @return new instance of {@link ReactiveSessionScoped}. Never {@literal null}. - * @since 2.1 - */ - ReactiveSessionScoped inTransaction(Publisher sessionProvider); - /** * Create an uncapped collection with a name based on the provided entity class. * @@ -269,6 +247,58 @@ Mono> createCollection(Class entityClass, */ Mono> createCollection(String collectionName, CollectionOptions collectionOptions); + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationOperation pipeline + * stages} on another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param stages the {@link AggregationOperation aggregation pipeline stages} defining the view content. + * @since 4.0 + */ + default Mono> createView(String name, Class source, AggregationOperation... stages) { + return createView(name, source, AggregationPipeline.of(stages)); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. 
+ * @since 4.0 + */ + default Mono> createView(String name, Class source, AggregationPipeline pipeline) { + return createView(name, source, pipeline, null); + } + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given {@link #getCollectionName(Class) source type}. + * + * @param name the name of the view to create. + * @param source the type defining the views source collection. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + Mono> createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + + /** + * Create a view with the provided name. The view content is defined by the {@link AggregationPipeline pipeline} on + * another collection or view identified by the given source. + * + * @param name the name of the view to create. + * @param source the name of the collection or view defining the to be created views source. + * @param pipeline the {@link AggregationPipeline} defining the view content. + * @param options additional settings to apply when creating the view. Can be {@literal null}. + * @since 4.0 + */ + Mono> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options); + /** * A set of collection names. * @@ -277,18 +307,19 @@ Mono> createCollection(Class entityClass, Flux getCollectionNames(); /** - * Get a collection by name, creating it if it doesn't exist. - *

          + * Get a {@link MongoCollection} by name. The returned collection may not exists yet (except in local memory) and is + * created on first interaction with the server. Collections can be explicitly created via + * {@link #createCollection(Class)}. Please make sure to check if the collection {@link #collectionExists(Class) + * exists} first.
          * Translate any exceptions as necessary. * * @param collectionName name of the collection. - * @return an existing collection or a newly created one. + * @return an existing collection or one created on first server interaction. */ - MongoCollection getCollection(String collectionName); + Mono> getCollection(String collectionName); /** - * Check to see if a collection with a name indicated by the entity class exists. - *

          + * Check to see if a collection with a name indicated by the entity class exists.
          * Translate any exceptions as necessary. * * @param entityClass class that determines the name of the collection. Must not be {@literal null}. @@ -297,8 +328,7 @@ Mono> createCollection(Class entityClass, Mono collectionExists(Class entityClass); /** - * Check to see if a collection with a given name exists. - *

          + * Check to see if a collection with a given name exists.
          * Translate any exceptions as necessary. * * @param collectionName name of the collection. Must not be {@literal null}. @@ -307,8 +337,7 @@ Mono> createCollection(Class entityClass, Mono collectionExists(String collectionName); /** - * Drop the collection with the name indicated by the entity class. - *

          + * Drop the collection with the name indicated by the entity class.
          * Translate any exceptions as necessary. * * @param entityClass class that determines the collection to drop/delete. Must not be {@literal null}. @@ -316,8 +345,7 @@ Mono> createCollection(Class entityClass, Mono dropCollection(Class entityClass); /** - * Drop the collection with the given name. - *

          + * Drop the collection with the given name.
          * Translate any exceptions as necessary. * * @param collectionName name of the collection to drop/delete. @@ -325,11 +353,43 @@ Mono> createCollection(Class entityClass, Mono dropCollection(String collectionName); /** - * Query for a {@link Flux} of objects of type T from the collection used by the entity class. - *

          + * Returns a new {@link ReactiveBulkOperations} for the given collection.
          + * NOTE: Any additional support for field mapping, etc. is not available for {@literal update} or + * {@literal remove} operations in bulk mode due to the lack of domain type information. Use + * {@link #bulkOps(BulkMode, Class, String)} to get full type specific support. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. + * @return {@link ReactiveBulkOperations} on the named collection + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName); + + /** + * Returns a new {@link ReactiveBulkOperations} for the given entity type. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param entityClass the name of the entity class, must not be {@literal null}. + * @return {@link ReactiveBulkOperations} on the named collection associated of the given entity class. + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, Class entityClass); + + /** + * Returns a new {@link ReactiveBulkOperations} for the given entity type and collection name. + * + * @param mode the {@link BulkMode} to use for bulk operations, must not be {@literal null}. + * @param entityType the name of the entity class. Can be {@literal null}. + * @param collectionName the name of the collection to work on, must not be {@literal null} or empty. + * @return {@link ReactiveBulkOperations} on the named collection associated with the given entity class. + * @since 4.1 + */ + ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName); + + /** + * Query for a {@link Flux} of objects of type T from the collection used by the entity class.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -339,11 +399,9 @@ Mono> createCollection(Class entityClass, Flux findAll(Class entityClass); /** - * Query for a {@link Flux} of objects of type T from the specified collection. - *

          + * Query for a {@link Flux} of objects of type T from the specified collection.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way * to map objects since the test for class type is done in the client and not on the server. * @@ -355,15 +413,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a single instance of an object of the - * specified type. - *

          + * specified type.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @return the converted object. @@ -372,15 +428,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified - * type. - *

          + * type.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @param collectionName name of the collection to retrieve the objects from. @@ -393,7 +447,7 @@ Mono> createCollection(Class entityClass, * NOTE: Any additional support for query/field mapping, etc. is not available due to the lack of * domain type information. Use {@link #exists(Query, Class, String)} to get full type specific support. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. */ @@ -402,7 +456,7 @@ Mono> createCollection(Class entityClass, /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. * @return {@literal true} if the query yields a result. */ @@ -411,7 +465,7 @@ Mono> createCollection(Class entityClass, /** * Determine result of given {@link Query} contains at least one element. * - * @param query the {@link Query} class that specifies the criteria used to find a record. + * @param query the {@link Query} class that specifies the criteria used to find a document. * @param entityClass the parametrized type. Can be {@literal null}. 
* @param collectionName name of the collection to check for objects. * @return {@literal true} if the query yields a result. @@ -420,14 +474,13 @@ Mono> createCollection(Class entityClass, /** * Map the results of an ad-hoc query on the collection for the entity class to a {@link Flux} of the specified type. - *

          + *
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned {@link Flux}. Must not be {@literal null}. * @return the {@link Flux} of converted objects. @@ -435,15 +488,13 @@ Mono> createCollection(Class entityClass, Flux find(Query query, Class entityClass); /** - * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type. - *

          + * Map the results of an ad-hoc query on the specified collection to a {@link Flux} of the specified type.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. Must not be {@literal null}. * @param entityClass the parametrized type of the returned {@link Flux}. * @param collectionName name of the collection to retrieve the objects from. Must not be {@literal null}. @@ -451,6 +502,57 @@ Mono> createCollection(Class entityClass, */ Flux find(Query query, Class entityClass, String collectionName); + /** + * Query for a scroll of objects of type T from the specified collection.
          + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
          + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

          + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@literal null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned list. + * @return {@link Mono} emitting the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Mono> scroll(Query query, Class entityType); + + /** + * Query for a window of objects of type T from the specified collection.
          + * Make sure to either set {@link Query#skip(long)} or {@link Query#with(KeysetScrollPosition)} along with + * {@link Query#limit(int)} to limit large query results for efficient scrolling.
          + * Result objects are converted from the MongoDB native representation using an instance of {@see MongoConverter}. + * Unless configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your collection does not contain a homogeneous collection of types, this operation will not be an efficient way + * to map objects since the test for class type is done in the client and not on the server. + *

          + * When using {@link KeysetScrollPosition}, make sure to use non-nullable {@link org.springframework.data.domain.Sort + * sort properties} as MongoDB does not support criteria to reconstruct a query result from absent document fields or + * {@literal null} values through {@code $gt/$lt} operators. + * + * @param query the query class that specifies the criteria used to find a document and also an optional fields + * specification. Must not be {@literal null}. + * @param entityType the parametrized type of the returned list. + * @param collectionName name of the collection to retrieve the objects from. + * @return {@link Mono} emitting the converted window. + * @throws IllegalStateException if a potential {@link Query#getKeyset() KeysetScrollPosition} contains an invalid + * position. + * @since 4.1 + * @see Query#with(org.springframework.data.domain.OffsetScrollPosition) + * @see Query#with(org.springframework.data.domain.KeysetScrollPosition) + */ + Mono> scroll(Query query, Class entityType, String collectionName); + /** * Returns a document with the given id mapped onto the given class. The collection the query is ran against will be * derived from the given target class as well. @@ -550,11 +652,9 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(TypedAggregation aggregation, String collectionName, Class outputType); /** - * Execute an aggregation operation. - *

          + * Execute an aggregation operation.
          * The raw results will be mapped to the given entity class and are returned as stream. The name of the - * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}. - *

          + * inputCollection is derived from the {@link TypedAggregation#getInputType() aggregation input type}.
          * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -568,11 +668,9 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(TypedAggregation aggregation, Class outputType); /** - * Execute an aggregation operation. - *

          + * Execute an aggregation operation.
          * The raw results will be mapped to the given {@code ouputType}. The name of the inputCollection is derived from the - * {@code inputType}. - *

          + * {@code inputType}.
          * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -588,10 +686,8 @@ default Flux findDistinct(Query query, String field, String collection, C Flux aggregate(Aggregation aggregation, Class inputType, Class outputType); /** - * Execute an aggregation operation. - *

          - * The raw results will be mapped to the given entity class. - *

          + * Execute an aggregation operation.
          + * The raw results will be mapped to the given entity class.
          * Aggregation streaming cannot be used with {@link AggregationOptions#isExplain() aggregation explain} nor with * {@link AggregationOptions#getCursorBatchSize()}. Enabling explanation mode or setting batch size cause * {@link IllegalArgumentException}. @@ -612,95 +708,149 @@ default Flux findDistinct(Query query, String field, String collection, C * entity mapping information to determine the collection the query is ran against. Note, that MongoDB limits the * number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect a * particular number of results. + *

          + * MongoDB 4.2 has removed the {@code geoNear} command. This method uses, since version 2.2, aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

          + * + *
          +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
          +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
          +	 * Flux<Document> results = aggregate(geoNear, Document.class);
          +	 * 
          * * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @return the converted {@link GeoResult}s. + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated Flux> geoNear(NearQuery near, Class entityClass); /** * Returns {@link Flux} of {@link GeoResult} for all entities matching the given {@link NearQuery}. Note, that MongoDB * limits the number of results by default. Make sure to add an explicit limit to the {@link NearQuery} if you expect * a particular number of results. + *

          + * MongoDB 4.2 has removed the {@code geoNear} command. This method uses, since version 2.2, aggregations and the + * {@code $geoNear} aggregation command to emulate {@code geoNear} command functionality. We recommend using + * aggregations directly: + *

          + * + *
          +	 * TypedAggregation<T> geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, "dis"))
          +	 * 		.withOptions(AggregationOptions.builder().collation(near.getCollation()).build());
          +	 * Flux<Document> results = aggregate(geoNear, Document.class);
          +	 * 
          * * @param near must not be {@literal null}. * @param entityClass must not be {@literal null}. * @param collectionName the collection to trigger the query against. If no collection name is given the entity class * will be inspected. * @return the converted {@link GeoResult}s. + * @deprecated since 2.2. The {@code geoNear} command has been removed in MongoDB Server 4.2.0. Use Aggregations with + * {@link Aggregation#geoNear(NearQuery, String)} instead. */ + @Deprecated Flux> geoNear(NearQuery near, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @return the converted object that was updated before it was updated. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, Class entityClass); + Mono findAndModify(Query query, UpdateDefinition update, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query}. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated before it was updated. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, Class entityClass, String collectionName); + Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. - * @param update the {@link Update} to apply on matching documents. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. + * @param update the {@link UpdateDefinition} to apply on matching documents. * @param options the {@link FindAndModifyOptions} holding additional information. * @param entityClass the parametrized type. * @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()} * this will either be the object as it was before the update or as it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass); + Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass); /** - * Triggers findAndModify + * Triggers findAndModify * to apply provided {@link Update} on documents matching {@link Criteria} of given {@link Query} taking * {@link FindAndModifyOptions} into account. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. - * @param update the {@link Update} to apply on matching documents. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} to apply on matching documents. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityClass the parametrized type. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated. Depending on the value of {@link FindAndModifyOptions#isReturnNew()} * this will either be the object as it was before the update or as it is after the update. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, + Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, Class entityClass, String collectionName); /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
          * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
          * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ default Mono findAndReplace(Query query, T replacement) { @@ -709,14 +859,14 @@ default Mono findAndReplace(Query query, T replacement) { /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} * document.
          * Options are defaulted to {@link FindAndReplaceOptions#empty()}.
          * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param collectionName the collection to query. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. @@ -728,18 +878,20 @@ default Mono findAndReplace(Query query, T replacement, String collection /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
          * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOptions options) { @@ -748,13 +900,13 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
          * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of @@ -764,19 +916,19 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt */ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOptions options, String collectionName) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); return findAndReplace(query, replacement, options, (Class) ClassUtils.getUserClass(replacement), collectionName); } /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
          * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the parametrized type. Must not be {@literal null}. @@ -794,13 +946,13 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
          * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection @@ -810,6 +962,8 @@ default Mono findAndReplace(Query query, T replacement, FindAndReplaceOpt * @return the converted object that was updated or {@link Mono#empty()}, if not found. Depending on the value of * {@link FindAndReplaceOptions#isReturnNew()} this will either be the object as it was before the update or * as it is after the update. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. * @since 2.1 */ default Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, @@ -821,13 +975,13 @@ default Mono findAndReplace(Query query, S replacement, FindAndReplace /** * Triggers - * findOneAndReplace + * findOneAndReplace * to replace a single document matching {@link Criteria} of given {@link Query} with the {@code replacement} document * taking {@link FindAndReplaceOptions} into account.
          * NOTE: The replacement entity must not hold an {@literal id}. * - * @param query the {@link Query} class that specifies the {@link Criteria} used to find a record and also an optional - * fields specification. Must not be {@literal null}. + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document and also an + * optional fields specification. Must not be {@literal null}. * @param replacement the replacement document. Must not be {@literal null}. * @param options the {@link FindAndModifyOptions} holding additional information. Must not be {@literal null}. * @param entityType the type used for mapping the {@link Query} to domain type fields and deriving the collection @@ -846,14 +1000,12 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Map the results of an ad-hoc query on the collection for the entity type to a single instance of an object of the * specified type. The first document that matches the query is returned and also removed from the collection in the - * database. - *

          - * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. - *

          + * database.
          + * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @return the converted object @@ -863,14 +1015,13 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Map the results of an ad-hoc query on the specified collection to a single instance of an object of the specified * type. The first document that matches the query is returned and also removed from the collection in the database. - *

          + *
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Mono}. * @param collectionName name of the collection to retrieve the objects from. @@ -880,82 +1031,231 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see #exactCount(Query, Class) + * @see #estimatedCount(Class) */ Mono count(Query query, Class entityClass); /** * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} * must solely consist of document field references as we lack type information to map potential property references - * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support. + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. * @see #count(Query, Class, String) + * @see #estimatedCount(String) + * @see #exactCount(Query, String) */ Mono count(Query query, String collectionName); /** * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity - * class to map the given {@link Query}. + * class to map the given {@link Query}.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method may choose to use {@link #estimatedCount(Class)} for empty queries instead of running an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} which may have an impact on performance. * * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be * {@literal null}. * @param entityClass the parametrized type. Can be {@literal null}. * @param collectionName must not be {@literal null} or empty. * @return the count of matching documents. + * @see #estimatedCount(String) + * @see #exactCount(Query, Class, String) */ Mono count(Query query, @Nullable Class entityClass, String collectionName); /** - * Insert the object into the collection for the entity type of the object to save. - *

          - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a - * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. - *

          - *

          + * Estimate the number of documents, in the collection {@link #getCollectionName(Class) identified by the given type}, + * based on collection statistics.
          + * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside + * transactions. + * + * @param entityClass must not be {@literal null}. + * @return a {@link Mono} emitting the estimated number of documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.1 + */ + default Mono estimatedCount(Class entityClass) { + + Assert.notNull(entityClass, "Entity class must not be null"); + return estimatedCount(getCollectionName(entityClass)); + } + + /** + * Estimate the number of documents in the given collection based on collection statistics.
          + * Please make sure to read the MongoDB reference documentation about limitations on eg. sharded cluster or inside + * transactions. + * + * @param collectionName must not be {@literal null}. + * @return a {@link Mono} emitting the estimated number of documents. + * @since 3.1 + */ + Mono estimatedCount(String collectionName); + + /** + * Returns the number of documents for the given {@link Query} by querying the collection of the given entity class. + *
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the applications needs use + * {@link #estimatedCount(Class)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass class that determines the collection to use. Must not be {@literal null}. + * @return the count of matching documents. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.4 + */ + default Mono exactCount(Query query, Class entityClass) { + return exactCount(query, entityClass, getCollectionName(entityClass)); + } + + /** + * Returns the number of documents for the given {@link Query} querying the given collection. The given {@link Query} + * must solely consist of document field references as we lack type information to map potential property references + * onto document fields. Use {@link #count(Query, Class, String)} to get full type specific support.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @see #count(Query, Class, String) + * @since 3.4 + */ + default Mono exactCount(Query query, String collectionName) { + return exactCount(query, null, collectionName); + } + + /** + * Returns the number of documents for the given {@link Query} by querying the given collection using the given entity + * class to map the given {@link Query}.
          + * NOTE: Query {@link Query#getSkip() offset} and {@link Query#getLimit() limit} can have direct + * influence on the resulting number of documents found as those values are passed on to the server and potentially + * limit the range and order within which the server performs the count operation. Use an {@literal unpaged} query to + * count all matches.
          + * This method uses an + * {@link com.mongodb.reactivestreams.client.MongoCollection#countDocuments(org.bson.conversions.Bson, com.mongodb.client.model.CountOptions) + * aggregation execution} even for empty {@link Query queries} which may have an impact on performance, but guarantees + * shard, session and transaction compliance. In case an inaccurate count satisfies the application's needs use + * {@link #estimatedCount(String)} for empty queries instead. + * + * @param query the {@link Query} class that specifies the criteria used to find documents. Must not be + * {@literal null}. + * @param entityClass the parametrized type. Can be {@literal null}. + * @param collectionName must not be {@literal null} or empty. + * @return the count of matching documents. + * @since 3.4 + */ + Mono exactCount(Query query, @Nullable Class entityClass, String collectionName); + + /** + * Insert the object into the collection for the entity type of the object to save.
          + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
          + * If your object has an {@literal Id} property which holds a {@literal null} value, it will be set with the generated + * Id from MongoDB. If your Id property is a String then MongoDB ObjectId will be used to populate that string. + * Otherwise, the conversion from ObjectId to your property type will be handled by Spring's BeanWrapper class that + * leverages Type Conversion API. See + * Spring's + * Type Conversion for more details.
          * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

          + * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono insert(T objectToSave); /** - * Insert the object into the specified collection. - *

          + * Insert the object into the specified collection.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. + *

          + * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the inserted object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. */ Mono insert(T objectToSave, String collectionName); /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the batch of objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. - * @return the inserted objects . + * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux insert(Collection batchToSave, Class entityClass); /** * Insert a batch of objects into the specified collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the list of objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -966,25 +1266,31 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the list of objects to save. Must not be {@literal null}. * @return the saved objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the given objects. */ Flux insertAll(Collection objectsToSave); /** - * Insert the object into the collection for the entity type of the object to save. - *

          - * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * Insert the object into the collection for the entity type of the object to save.
          + * The object is converted to the MongoDB native representation using an instance of {@link MongoConverter}.
          + * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details.

          - *

          + * Spring's + * Type Conversion for more details.
          * Insert is used to initially store the object into the database. To update an existing object use the save method. + *

          + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the inserted objects. @@ -993,15 +1299,27 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Insert a Collection of objects into a collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the inserted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} for the type. */ Flux insertAll(Mono> batchToSave, Class entityClass); /** * Insert objects into the specified collection in a single batch write to the database. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param batchToSave the publisher which provides objects to save. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. @@ -1012,6 +1330,11 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Insert a mixed Collection of objects into a database collection determining the collection name to use based on the * class. + *

          + * If an object within the batch has an {@literal Id} property which holds a {@literal null} value, it will be set + * with the generated Id from MongoDB. + *

          + * Inserting new objects will trigger {@link org.springframework.data.annotation.Version} property initialization. * * @param objectsToSave the publisher which provides objects to save. Must not be {@literal null}. * @return the inserted objects. @@ -1020,209 +1343,292 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

          + * object is not already present, that is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id" property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. + * Spring's + * Type Conversion for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. + *

          + * The {@code objectToSave} must not be collection-like. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(T objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

          + * is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. + * @throws IllegalArgumentException in case the {@code objectToSave} is collection-like. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(T objectToSave, String collectionName); /** * Save the object to the collection for the entity type of the object to save. This will perform an insert if the - * object is not already present, that is an 'upsert'. - *

          + * object is not already present, that is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See - * Spring's Type - * Conversion" for more details. + * Spring's Type + * Conversion for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} the property will be auto incremented. The + * operation raises an error in case the document has been modified in between. * * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @return the saved object. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(Mono objectToSave); /** * Save the object to the specified collection. This will perform an insert if the object is not already present, that - * is an 'upsert'. - *

          + * is an 'upsert'.
          * The object is converted to the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          - * If you object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          + * If your object has an "Id' property, it will be set with the generated Id from MongoDB. If your Id property is a * String then MongoDB ObjectId will be used to populate that string. Otherwise, the conversion from ObjectId to your - * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See Spring's Type - * Conversion" for more details. + * property type will be handled by Spring's BeanWrapper class that leverages Type Conversion API. See + * Spring's Type + * Conversion for more details. + *

          + * A potential {@link org.springframework.data.annotation.Version} property will be auto-incremented. The + * operation raises an error in case the document has been modified in between. * - * @param objectToSave the object to store in the collection. Must not be {@literal null}. + * @param objectToSave the object to store in the collection. Must not be {@literal null}. * @param collectionName name of the collection to store the object in. Must not be {@literal null}. * @return the saved object. + * @throws org.springframework.dao.OptimisticLockingFailureException in case of version mismatch in case a + * {@link org.springframework.data.annotation.Version} is defined. */ Mono save(Mono objectToSave, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + *

          + * NOTE: {@link Query#getSortObject() sorting} is not supported by {@code db.collection.updateOne}. + * Use {@link #findAndModify(Query, UpdateDefinition, Class)} instead. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class that determines the collection to use. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono upsert(Query query, Update update, Class entityClass); + Mono upsert(Query query, UpdateDefinition update, Class entityClass); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document.
          * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #upsert(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #upsert(Query, UpdateDefinition, Class, String)} to get full type specific + * support. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono upsert(Query query, Update update, String collectionName); + Mono upsert(Query query, UpdateDefinition update, String collectionName); /** * Performs an upsert. If no document is found that matches the query, a new document is created and inserted by * combining the query document and the update document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be upserted. Must not be + * @param query the query document that specifies the criteria used to select a document to be upserted. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing - * object. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing object. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono upsert(Query query, Update update, Class entityClass, String collectionName); + Mono upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates the first object that is found in the collection of the entity class that matches the query document with * the provided update document. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class that determines the collection to use. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateFirst(Query query, Update update, Class entityClass); + Mono updateFirst(Query query, UpdateDefinition update, Class entityClass); /** * Updates the first object that is found in the specified collection that matches the query document criteria with * the provided updated document.
          * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateFirst(Query, Update, Class, String)} to get full type specific support. - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * domain type information. Use {@link #updateFirst(Query, UpdateDefinition, Class, String)} to get full type specific + * support. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateFirst(Query query, Update update, String collectionName); + Mono updateFirst(Query query, UpdateDefinition update, String collectionName); /** * Updates the first object that is found in the specified collection that matches the query document criteria with - * the provided updated document.
          - * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be - * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * the provided updated document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. + * + * @param query the query document that specifies the criteria used to select a document to be updated. The + * {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to update when + * potentially matching multiple candidates. Must not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateFirst(Query query, Update update, Class entityClass, String collectionName); + Mono updateFirst(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. + * @see Update + * @see AggregationUpdate */ - Mono updateMulti(Query query, Update update, Class entityClass); + Mono updateMulti(Query query, UpdateDefinition update, Class entityClass); /** * Updates all objects that are found in the specified collection that matches the query document criteria with the * provided updated document.
          * NOTE: Any additional support for field mapping, versions, etc. is not available due to the lack of - * domain type information. Use {@link #updateMulti(Query, Update, Class, String)} to get full type specific support. + * domain type information. Use {@link #updateMulti(Query, UpdateDefinition, Class, String)} to get full type specific + * support. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateMulti(Query query, Update update, String collectionName); + Mono updateMulti(Query query, UpdateDefinition update, String collectionName); /** * Updates all objects that are found in the collection for the entity class that matches the query document criteria * with the provided updated document. + *

          + * A potential {@link org.springframework.data.annotation.Version} property of the {@literal entityClass} will be + * auto-incremented if not explicitly specified in the update. * - * @param query the query document that specifies the criteria used to select a record to be updated. Must not be + * @param query the query document that specifies the criteria used to select a document to be updated. Must not be * {@literal null}. - * @param update the update document that contains the updated object or $ operators to manipulate the existing. Must - * not be {@literal null}. + * @param update the {@link UpdateDefinition} that contains the updated object or {@code $} operators to manipulate + * the existing. Must not be {@literal null}. * @param entityClass class of the pojo to be operated on. Must not be {@literal null}. * @param collectionName name of the collection to update the object in. Must not be {@literal null}. * @return the {@link UpdateResult} which lets you access the results of the previous write. + * @since 3.0 + * @see Update + * @see AggregationUpdate */ - Mono updateMulti(Query query, Update update, Class entityClass, String collectionName); + Mono updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName); /** - * Remove the given object from the collection by id. + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param object must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono remove(Object object); @@ -1230,45 +1636,54 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * Removes the given object from the given collection. 
* * @param object must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Object object, String collectionName); /** - * Remove the given object from the collection by id. + * Remove the given object from the collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param objectToRemove must not be {@literal null}. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given object type. */ Mono remove(Mono objectToRemove); /** - * Removes the given object from the given collection. + * Removes the given object from the given collection by {@literal id} and (if applicable) its + * {@link org.springframework.data.annotation.Version}. * * @param objectToRemove must not be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Mono objectToRemove, String collectionName); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. 
The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class that determines the collection to use. * @return the {@link DeleteResult} which lets you access the results of the previous delete. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Mono remove(Query query, Class entityClass); /** - * Remove all documents that match the provided query document criteria from the the collection used to store the + * Remove all documents that match the provided query document criteria from the collection used to store the * entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the query. * - * @param query the query document that specifies the criteria used to remove a record. + * @param query the query document that specifies the criteria used to remove a document. * @param entityClass class of the pojo to be operated on. Can be {@literal null}. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Query query, @Nullable Class entityClass, String collectionName); @@ -1279,8 +1694,9 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * NOTE: Any additional support for field mapping is not available due to the lack of domain type * information. Use {@link #remove(Query, Class, String)} to get full type specific support. 
* - * @param query the query document that specifies the criteria used to remove a record. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param query the query document that specifies the criteria used to remove a document. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link DeleteResult} which lets you access the results of the previous delete. */ Mono remove(Query query, String collectionName); @@ -1291,7 +1707,8 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * information. Use {@link #findAllAndRemove(Query, Class, String)} to get full type specific support. * * @param query the query document that specifies the criteria used to find and remove documents. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link Flux} converted objects deleted by this operation. */ Flux findAllAndRemove(Query query, String collectionName); @@ -1302,37 +1719,113 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. * @return the {@link Flux} converted objects deleted by this operation. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux findAllAndRemove(Query query, Class entityClass); /** - * Returns and removes all documents that match the provided query document criteria from the the collection used to - * store the entityClass. 
The Class parameter is also used to help convert the Id of the object if it is present in - * the query. + * Returns and removes all documents that match the provided query document criteria from the collection used to store + * the entityClass. The Class parameter is also used to help convert the Id of the object if it is present in the + * query. * * @param query the query document that specifies the criteria used to find and remove documents. * @param entityClass class of the pojo to be operated on. - * @param collectionName name of the collection where the objects will removed, must not be {@literal null} or empty. + * @param collectionName name of the collection where the documents will be removed from, must not be {@literal null} + * or empty. * @return the {@link Flux} converted objects deleted by this operation. */ Flux findAllAndRemove(Query query, Class entityClass, String collectionName); + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document.
          + * The collection name is derived from the {@literal replacement} type.
          + * Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default Mono replace(Query query, T replacement) { + return replace(query, replacement, ReplaceOptions.none()); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document. Options are defaulted to {@link ReplaceOptions#none()}. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param collectionName the collection to query. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. 
+ * @since 4.2 + */ + default Mono replace(Query query, T replacement, String collectionName) { + return replace(query, replacement, ReplaceOptions.none(), collectionName); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document.The query may + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + default Mono replace(Query query, T replacement, ReplaceOptions options) { + return replace(query, replacement, options, getCollectionName(ClassUtils.getUserClass(replacement))); + } + + /** + * Replace a single document matching the {@link Criteria} of given {@link Query} with the {@code replacement} + * document taking {@link ReplaceOptions} into account. + * + * @param query the {@link Query} class that specifies the {@link Criteria} used to find a document. The query may * + * contain an index {@link Query#withHint(String) hint} or the {@link Query#collation(Collation) collation} + * to use. 
The {@link Query} may define a {@link Query#with(Sort) sort order} to influence which document to + * replace when potentially matching multiple candidates. Must not be {@literal null}. + * @param replacement the replacement document. Must not be {@literal null}. + * @param options the {@link ReplaceOptions} holding additional information. Must not be {@literal null}. + * @return the {@link UpdateResult} which lets you access the results of the previous replacement. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be + * {@link #getCollectionName(Class) derived} from the given replacement value. + * @since 4.2 + */ + Mono replace(Query query, T replacement, ReplaceOptions options, String collectionName); + /** * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

          + * {@link Subscription#cancel() canceled}.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Flux}. * @return the {@link Flux} of converted objects. + * @throws org.springframework.data.mapping.MappingException if the target collection name cannot be + * {@link #getCollectionName(Class) derived} from the given type. */ Flux tail(Query query, Class entityClass); @@ -1340,15 +1833,13 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * Map the results of an ad-hoc query on the collection for the entity class to a stream of objects of the specified * type. The stream uses a {@link com.mongodb.CursorType#TailableAwait tailable} cursor that may be an infinite * stream. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

          + * {@link Subscription#cancel() canceled}.
          * The object is converted from the MongoDB native representation using an instance of {@see MongoConverter}. Unless - * configured otherwise, an instance of {@link MappingMongoConverter} will be used. - *

          + * configured otherwise, an instance of {@link MappingMongoConverter} will be used.
          * The query is specified as a {@link Query} which can be created either using the {@link BasicQuery} or the more * feature rich {@link Query}. * - * @param query the query class that specifies the criteria used to find a record and also an optional fields + * @param query the query class that specifies the criteria used to find a document and also an optional fields * specification. * @param entityClass the parametrized type of the returned {@link Flux}. * @param collectionName name of the collection to retrieve the objects from. @@ -1360,11 +1851,9 @@ Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions * Subscribe to a MongoDB Change Stream for all events in * the configured default database via the reactive infrastructure. Use the optional provided {@link Aggregation} to * filter events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

          + * {@link Subscription#cancel() canceled}.
          * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

          + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
          * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1384,11 +1873,9 @@ default Flux> changeStream(ChangeStreamOptions options, * Subscribe to a MongoDB Change Stream for all events in * the given collection via the reactive infrastructure. Use the optional provided {@link Aggregation} to filter * events. The stream will not be completed unless the {@link org.reactivestreams.Subscription} is - * {@link Subscription#cancel() canceled}. - *

          + * {@link Subscription#cancel() canceled}.
          * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

          + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
          * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1409,11 +1896,9 @@ default Flux> changeStream(@Nullable String collectionN /** * Subscribe to a MongoDB Change Stream via the reactive * infrastructure. Use the optional provided {@link Aggregation} to filter events. The stream will not be completed - * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}. - *

          + * unless the {@link org.reactivestreams.Subscription} is {@link Subscription#cancel() canceled}.
          * The {@link ChangeStreamEvent#getBody()} is mapped to the {@literal resultType} while the - * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload. - *

          + * {@link ChangeStreamEvent#getRaw()} contains the unmodified payload.
          * Use {@link ChangeStreamOptions} to set arguments like {@link ChangeStreamOptions#getResumeToken() the resumseToken} * for resuming change streams. * @@ -1443,7 +1928,9 @@ Flux> changeStream(@Nullable String database, @Nullable * @param options additional options like output collection. Must not be {@literal null}. * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated Flux mapReduce(Query filterQuery, Class domainType, Class resultType, String mapFunction, String reduceFunction, MapReduceOptions options); @@ -1461,14 +1948,16 @@ Flux mapReduce(Query filterQuery, Class domainType, Class resultTyp * @param options additional options like output collection. Must not be {@literal null}. * @return a {@link Flux} emitting the result document sequence. Never {@literal null}. * @since 2.1 + * @deprecated since 3.4 in favor of {@link #aggregate(TypedAggregation, Class)}. */ + @Deprecated Flux mapReduce(Query filterQuery, Class domainType, String inputCollectionName, Class resultType, String mapFunction, String reduceFunction, MapReduceOptions options); /** * Returns the underlying {@link MongoConverter}. * - * @return + * @return never {@literal null}. */ MongoConverter getConverter(); @@ -1476,7 +1965,8 @@ Flux mapReduce(Query filterQuery, Class domainType, String inputCollec * The collection name used for the specified class by this template. * * @param entityClass must not be {@literal null}. - * @return + * @return never {@literal null}. + * @throws org.springframework.data.mapping.MappingException if the collection name cannot be derived from the type. 
* @since 2.1 */ String getCollectionName(Class entityClass); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java index 2a88ef4a17..0ad473b8b7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveMongoTemplate.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,30 +17,36 @@ import static org.springframework.data.mongodb.core.query.SerializationUtils.*; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuple2; import reactor.util.function.Tuples; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.concurrent.TimeUnit; +import java.util.function.BiFunction; +import java.util.function.BiPredicate; import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; -import org.bson.BsonTimestamp; +import org.apache.commons.logging.Log; +import 
org.apache.commons.logging.LogFactory; import org.bson.BsonValue; import org.bson.Document; -import org.bson.codecs.Codec; import org.bson.conversions.Bson; import org.bson.types.ObjectId; +import org.jspecify.annotations.Nullable; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; @@ -53,70 +59,105 @@ import org.springframework.dao.OptimisticLockingFailureException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.convert.EntityReader; +import org.springframework.data.domain.OffsetScrollPosition; +import org.springframework.data.domain.Window; import org.springframework.data.geo.Distance; import org.springframework.data.geo.GeoResult; import org.springframework.data.geo.Metric; +import org.springframework.data.mapping.MappingException; import org.springframework.data.mapping.PersistentEntity; -import org.springframework.data.mapping.PropertyPath; -import org.springframework.data.mapping.PropertyReferenceException; +import org.springframework.data.mapping.callback.ReactiveEntityCallbacks; import org.springframework.data.mapping.context.MappingContext; import org.springframework.data.mapping.context.MappingContextEvent; -import org.springframework.data.mongodb.MongoDbFactory; +import org.springframework.data.mongodb.MongoDatabaseFactory; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; +import org.springframework.data.mongodb.ReactiveMongoDatabaseUtils; +import org.springframework.data.mongodb.SessionSynchronization; +import org.springframework.data.mongodb.core.BulkOperations.BulkMode; +import org.springframework.data.mongodb.core.CollectionPreparerSupport.ReactiveCollectionPreparerDelegate; +import 
org.springframework.data.mongodb.core.DefaultReactiveBulkOperations.ReactiveBulkOperationContext; import org.springframework.data.mongodb.core.EntityOperations.AdaptibleEntity; +import org.springframework.data.mongodb.core.QueryOperations.AggregationDefinition; +import org.springframework.data.mongodb.core.QueryOperations.CountContext; +import org.springframework.data.mongodb.core.QueryOperations.DeleteContext; +import org.springframework.data.mongodb.core.QueryOperations.DistinctQueryContext; +import org.springframework.data.mongodb.core.QueryOperations.QueryContext; +import org.springframework.data.mongodb.core.QueryOperations.UpdateContext; +import org.springframework.data.mongodb.core.ScrollUtils.KeysetScrollQuery; import org.springframework.data.mongodb.core.aggregation.Aggregation; import org.springframework.data.mongodb.core.aggregation.AggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.AggregationOptions; +import org.springframework.data.mongodb.core.aggregation.AggregationOptions.Builder; +import org.springframework.data.mongodb.core.aggregation.AggregationPipeline; import org.springframework.data.mongodb.core.aggregation.PrefixingDelegatingAggregationOperationContext; +import org.springframework.data.mongodb.core.aggregation.RelaxedTypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypeBasedAggregationOperationContext; import org.springframework.data.mongodb.core.aggregation.TypedAggregation; -import org.springframework.data.mongodb.core.convert.*; +import org.springframework.data.mongodb.core.convert.DbRefResolver; +import org.springframework.data.mongodb.core.convert.MappingMongoConverter; +import org.springframework.data.mongodb.core.convert.MongoConverter; +import org.springframework.data.mongodb.core.convert.MongoCustomConversions; +import org.springframework.data.mongodb.core.convert.MongoWriter; +import org.springframework.data.mongodb.core.convert.NoOpDbRefResolver; 
+import org.springframework.data.mongodb.core.convert.QueryMapper; +import org.springframework.data.mongodb.core.convert.UpdateMapper; import org.springframework.data.mongodb.core.index.MongoMappingEventPublisher; import org.springframework.data.mongodb.core.index.ReactiveIndexOperations; import org.springframework.data.mongodb.core.index.ReactiveMongoPersistentEntityIndexCreator; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.mapping.MongoMappingContext; import org.springframework.data.mongodb.core.mapping.MongoPersistentEntity; import org.springframework.data.mongodb.core.mapping.MongoPersistentProperty; import org.springframework.data.mongodb.core.mapping.MongoSimpleTypes; -import org.springframework.data.mongodb.core.mapping.event.AfterConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterLoadEvent; -import org.springframework.data.mongodb.core.mapping.event.AfterSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeConvertEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeDeleteEvent; -import org.springframework.data.mongodb.core.mapping.event.BeforeSaveEvent; -import org.springframework.data.mongodb.core.mapping.event.MongoMappingEvent; +import org.springframework.data.mongodb.core.mapping.event.*; import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions; +import org.springframework.data.mongodb.core.query.BasicQuery; import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Meta; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.data.mongodb.core.query.Update; -import 
org.springframework.data.mongodb.core.validation.Validator; -import org.springframework.data.projection.ProjectionInformation; -import org.springframework.data.projection.SpelAwareProxyProjectionFactory; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.data.mongodb.core.query.UpdateDefinition.ArrayFilter; +import org.springframework.data.projection.EntityProjection; import org.springframework.data.util.Optionals; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; import org.springframework.util.CollectionUtils; +import org.springframework.util.NumberUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.ResourceUtils; import org.springframework.util.StringUtils; import com.mongodb.ClientSessionOptions; import com.mongodb.CursorType; -import com.mongodb.DBCollection; -import com.mongodb.DBCursor; -import com.mongodb.Mongo; import com.mongodb.MongoException; import com.mongodb.ReadPreference; import com.mongodb.WriteConcern; -import com.mongodb.client.model.*; +import com.mongodb.client.model.CountOptions; +import com.mongodb.client.model.CreateCollectionOptions; +import com.mongodb.client.model.CreateViewOptions; +import com.mongodb.client.model.DeleteOptions; +import com.mongodb.client.model.EstimatedDocumentCountOptions; +import com.mongodb.client.model.FindOneAndDeleteOptions; +import com.mongodb.client.model.FindOneAndReplaceOptions; +import com.mongodb.client.model.FindOneAndUpdateOptions; +import com.mongodb.client.model.ReturnDocument; +import com.mongodb.client.model.UpdateOptions; import com.mongodb.client.model.changestream.FullDocument; import com.mongodb.client.result.DeleteResult; +import com.mongodb.client.result.InsertOneResult; import com.mongodb.client.result.UpdateResult; -import com.mongodb.reactivestreams.client.*; +import 
com.mongodb.reactivestreams.client.AggregatePublisher; +import com.mongodb.reactivestreams.client.ChangeStreamPublisher; +import com.mongodb.reactivestreams.client.ClientSession; +import com.mongodb.reactivestreams.client.DistinctPublisher; +import com.mongodb.reactivestreams.client.FindPublisher; +import com.mongodb.reactivestreams.client.MapReducePublisher; +import com.mongodb.reactivestreams.client.MongoClient; +import com.mongodb.reactivestreams.client.MongoCollection; +import com.mongodb.reactivestreams.client.MongoDatabase; /** * Primary implementation of {@link ReactiveMongoOperations}. It simplifies the use of Reactive MongoDB usage and helps @@ -124,32 +165,33 @@ * extract results. This class executes BSON queries or updates, initiating iteration over {@link FindPublisher} and * catching MongoDB exceptions and translating them to the generic, more informative exception hierarchy defined in the * org.springframework.dao package. Can be used within a service implementation via direct instantiation with a - * {@link SimpleReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services - * as bean reference. Note: The {@link SimpleReactiveMongoDatabaseFactory} should always be configured as a bean in the - * application context, in the first case given to the service directly, in the second case to the prepared template. + * {@link ReactiveMongoDatabaseFactory} reference, or get prepared in an application context and given to services as + * bean reference. + *

          + * Note: The {@link ReactiveMongoDatabaseFactory} should always be configured as a bean in the application context, in + * the first case given to the service directly, in the second case to the prepared template. + *

          {@link ReadPreference} and {@link com.mongodb.ReadConcern}

          + *

          + * {@code ReadPreference} and {@code ReadConcern} are generally considered from {@link Query} and + * {@link AggregationOptions} objects for the action to be executed on a particular {@link MongoCollection}. + *

          + * You can also set the default {@link #setReadPreference(ReadPreference) ReadPreference} on the template level to + * generally apply a {@link ReadPreference}. * * @author Mark Paluch * @author Christoph Strobl + * @author Roman Puchkovskiy + * @author Mathieu Ouellet + * @author Yadhukrishna S Pai + * @author Florian Lüdiger * @since 2.0 */ public class ReactiveMongoTemplate implements ReactiveMongoOperations, ApplicationContextAware { public static final DbRefResolver NO_OP_REF_RESOLVER = NoOpDbRefResolver.INSTANCE; - private static final Logger LOGGER = LoggerFactory.getLogger(ReactiveMongoTemplate.class); + private static final Log LOGGER = LogFactory.getLog(ReactiveMongoTemplate.class); private static final WriteResultChecking DEFAULT_WRITE_RESULT_CHECKING = WriteResultChecking.NONE; - private static final Collection> ITERABLE_CLASSES; - - static { - - Set> iterableClasses = new HashSet<>(); - iterableClasses.add(List.class); - iterableClasses.add(Collection.class); - iterableClasses.add(Iterator.class); - iterableClasses.add(Publisher.class); - - ITERABLE_CLASSES = Collections.unmodifiableCollection(iterableClasses); - } private final MongoConverter mongoConverter; private final MappingContext, MongoPersistentProperty> mappingContext; @@ -157,18 +199,24 @@ public class ReactiveMongoTemplate implements ReactiveMongoOperations, Applicati private final PersistenceExceptionTranslator exceptionTranslator; private final QueryMapper queryMapper; private final UpdateMapper updateMapper; - private final JsonSchemaMapper schemaMapper; - private final SpelAwareProxyProjectionFactory projectionFactory; private final ApplicationListener> indexCreatorListener; private final EntityOperations operations; + private final PropertyOperations propertyOperations; + private final QueryOperations queryOperations; + private final EntityLifecycleEventDelegate eventDelegate; private @Nullable WriteConcern writeConcern; private WriteConcernResolver writeConcernResolver = 
DefaultWriteConcernResolver.INSTANCE; private WriteResultChecking writeResultChecking = WriteResultChecking.NONE; private @Nullable ReadPreference readPreference; private @Nullable ApplicationEventPublisher eventPublisher; + private @Nullable ReactiveEntityCallbacks entityCallbacks; private @Nullable ReactiveMongoPersistentEntityIndexCreator indexCreator; + private SessionSynchronization sessionSynchronization = SessionSynchronization.ON_ACTUAL_TRANSACTION; + + private CountExecution countExecution = this::doExactCount; + /** * Constructor used for a basic template configuration. * @@ -212,31 +260,34 @@ public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, public ReactiveMongoTemplate(ReactiveMongoDatabaseFactory mongoDatabaseFactory, @Nullable MongoConverter mongoConverter, Consumer subscriptionExceptionHandler) { - Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null!"); + Assert.notNull(mongoDatabaseFactory, "ReactiveMongoDatabaseFactory must not be null"); this.mongoDatabaseFactory = mongoDatabaseFactory; this.exceptionTranslator = mongoDatabaseFactory.getExceptionTranslator(); this.mongoConverter = mongoConverter == null ? 
getDefaultMongoConverter() : mongoConverter; this.queryMapper = new QueryMapper(this.mongoConverter); this.updateMapper = new UpdateMapper(this.mongoConverter); - this.schemaMapper = new MongoJsonSchemaMapper(this.mongoConverter); - this.projectionFactory = new SpelAwareProxyProjectionFactory(); this.indexCreatorListener = new IndexCreatorEventListener(subscriptionExceptionHandler); // We always have a mapping context in the converter, whether it's a simple one or not this.mappingContext = this.mongoConverter.getMappingContext(); - this.operations = new EntityOperations(this.mappingContext); + this.operations = new EntityOperations(this.mongoConverter, this.queryMapper); + this.propertyOperations = new PropertyOperations(this.mongoConverter.getMappingContext()); + this.queryOperations = new QueryOperations(queryMapper, updateMapper, operations, propertyOperations, + mongoDatabaseFactory); + this.eventDelegate = new EntityLifecycleEventDelegate(); // We create indexes based on mapping events - if (this.mappingContext instanceof MongoMappingContext) { + if (this.mappingContext instanceof MongoMappingContext mongoMappingContext) { - MongoMappingContext mongoMappingContext = (MongoMappingContext) this.mappingContext; - this.indexCreator = new ReactiveMongoPersistentEntityIndexCreator(mongoMappingContext, this::indexOps); - this.eventPublisher = new MongoMappingEventPublisher(this.indexCreatorListener); + if (mongoMappingContext.isAutoIndexCreation()) { + this.indexCreator = new ReactiveMongoPersistentEntityIndexCreator(mongoMappingContext, this::indexOps); + this.eventPublisher = new MongoMappingEventPublisher(this.indexCreatorListener); - mongoMappingContext.setApplicationEventPublisher(this.eventPublisher); - this.mappingContext.getPersistentEntities() - .forEach(entity -> onCheckForIndexes(entity, subscriptionExceptionHandler)); + mongoMappingContext.setApplicationEventPublisher(this.eventPublisher); + this.mappingContext.getPersistentEntities() + .forEach(entity -> 
onCheckForIndexes(entity, subscriptionExceptionHandler)); + } } } @@ -247,12 +298,14 @@ private ReactiveMongoTemplate(ReactiveMongoDatabaseFactory dbFactory, ReactiveMo this.mongoConverter = that.mongoConverter; this.queryMapper = that.queryMapper; this.updateMapper = that.updateMapper; - this.schemaMapper = that.schemaMapper; - this.projectionFactory = that.projectionFactory; this.indexCreator = that.indexCreator; this.indexCreatorListener = that.indexCreatorListener; this.mappingContext = that.mappingContext; this.operations = that.operations; + this.propertyOperations = that.propertyOperations; + this.sessionSynchronization = that.sessionSynchronization; + this.queryOperations = that.queryOperations; + this.eventDelegate = that.eventDelegate; } private void onCheckForIndexes(MongoPersistentEntity entity, Consumer subscriptionExceptionHandler) { @@ -278,8 +331,7 @@ public void setWriteResultChecking(@Nullable WriteResultChecking resultChecking) /** * Configures the {@link WriteConcern} to be used with the template. If none is configured the {@link WriteConcern} - * configured on the {@link MongoDbFactory} will apply. If you configured a {@link Mongo} instance no - * {@link WriteConcern} will be used. + * configured on the {@link MongoDatabaseFactory} will apply. * * @param writeConcern can be {@literal null}. */ @@ -293,7 +345,8 @@ public void setWriteConcern(@Nullable WriteConcern writeConcern) { * @param writeConcernResolver can be {@literal null}. */ public void setWriteConcernResolver(@Nullable WriteConcernResolver writeConcernResolver) { - this.writeConcernResolver = writeConcernResolver; + this.writeConcernResolver = writeConcernResolver != null ? 
writeConcernResolver + : DefaultWriteConcernResolver.INSTANCE; } /** @@ -306,21 +359,92 @@ public void setReadPreference(ReadPreference readPreference) { this.readPreference = readPreference; } - /* - * (non-Javadoc) - * @see org.springframework.context.ApplicationContextAware#setApplicationContext(org.springframework.context.ApplicationContext) + /** + * Configure whether lifecycle events such as {@link AfterLoadEvent}, {@link BeforeSaveEvent}, etc. should be + * published or whether emission should be suppressed. Enabled by default. + * + * @param enabled {@code true} to enable entity lifecycle events; {@code false} to disable entity lifecycle events. + * @since 4.0 + * @see MongoMappingEvent */ + public void setEntityLifecycleEventsEnabled(boolean enabled) { + this.eventDelegate.setEventsEnabled(enabled); + } + + @Override public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { prepareIndexCreator(applicationContext); eventPublisher = applicationContext; - if (mappingContext instanceof ApplicationEventPublisherAware) { - ((ApplicationEventPublisherAware) mappingContext).setApplicationEventPublisher(eventPublisher); + eventDelegate.setPublisher(eventPublisher); + + if (entityCallbacks == null) { + setEntityCallbacks(ReactiveEntityCallbacks.create(applicationContext)); } - projectionFactory.setBeanFactory(applicationContext); - projectionFactory.setBeanClassLoader(applicationContext.getClassLoader()); + if (mappingContext instanceof ApplicationEventPublisherAware applicationEventPublisherAware) { + applicationEventPublisherAware.setApplicationEventPublisher(eventPublisher); + } + } + + /** + * Set the {@link ReactiveEntityCallbacks} instance to use when invoking + * {@link org.springframework.data.mapping.callback.EntityCallback callbacks} like the + * {@link ReactiveBeforeSaveCallback}.
          + * Overrides potentially existing {@link ReactiveEntityCallbacks}. + * + * @param entityCallbacks must not be {@literal null}. + * @throws IllegalArgumentException if the given instance is {@literal null}. + * @since 2.2 + */ + public void setEntityCallbacks(ReactiveEntityCallbacks entityCallbacks) { + + Assert.notNull(entityCallbacks, "EntityCallbacks must not be null"); + this.entityCallbacks = entityCallbacks; + } + + /** + * Configure whether to use estimated count. Defaults to exact counting. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @since 3.4 + */ + public void useEstimatedCount(boolean enabled) { + useEstimatedCount(enabled, this::countCanBeEstimated); + } + + /** + * Configure whether to use estimated count based on the given {@link BiPredicate estimationFilter}. + * + * @param enabled use {@link com.mongodb.client.MongoCollection#estimatedDocumentCount()} for unpaged and empty + * {@link Query queries} if {@code true}. + * @param estimationFilter the {@link BiPredicate filter}. 
+ * @since 3.4 + */ + private void useEstimatedCount(boolean enabled, BiFunction> estimationFilter) { + + if (enabled) { + + this.countExecution = (collectionName, filter, options) -> { + + return estimationFilter.apply(filter, options).flatMap(canEstimate -> { + if (!canEstimate) { + return doExactCount(collectionName, filter, options); + } + + EstimatedDocumentCountOptions estimatedDocumentCountOptions = new EstimatedDocumentCountOptions(); + if (options.getMaxTime(TimeUnit.MILLISECONDS) > 0) { + estimatedDocumentCountOptions.maxTime(options.getMaxTime(TimeUnit.MILLISECONDS), TimeUnit.MILLISECONDS); + } + + return doEstimatedCount(collectionName, estimatedDocumentCountOptions); + }); + }; + } else { + this.countExecution = this::doExactCount; + } } /** @@ -344,8 +468,8 @@ private void prepareIndexCreator(ApplicationContext context) { } } - if (context instanceof ConfigurableApplicationContext) { - ((ConfigurableApplicationContext) context).addApplicationListener(indexCreatorListener); + if (context instanceof ConfigurableApplicationContext configurableApplicationContext) { + configurableApplicationContext.addApplicationListener(indexCreatorListener); } } @@ -354,95 +478,66 @@ private void prepareIndexCreator(ApplicationContext context) { * * @return */ + @Override public MongoConverter getConverter() { return this.mongoConverter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.String) - */ + @Override public ReactiveIndexOperations indexOps(String collectionName) { return new DefaultReactiveIndexOperations(this, collectionName, this.queryMapper); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#reactiveIndexOps(java.lang.Class) - */ + @Override public ReactiveIndexOperations indexOps(Class entityClass) { - return new DefaultReactiveIndexOperations(this, determineCollectionName(entityClass), this.queryMapper, - entityClass); + return new 
DefaultReactiveIndexOperations(this, getCollectionName(entityClass), this.queryMapper, entityClass); } + @Override public String getCollectionName(Class entityClass) { - return this.determineCollectionName(entityClass); + return operations.determineCollectionName(entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(java.lang.String) - */ + @Override public Mono executeCommand(String jsonCommand) { - Assert.notNull(jsonCommand, "Command must not be empty!"); + Assert.notNull(jsonCommand, "Command must not be empty"); return executeCommand(Document.parse(jsonCommand)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document) - */ - public Mono executeCommand(final Document command) { + @Override + public Mono executeCommand(Document command) { return executeCommand(command, null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#executeCommand(org.bson.Document, com.mongodb.ReadPreference) - */ - public Mono executeCommand(final Document command, @Nullable ReadPreference readPreference) { + @Override + public Mono executeCommand(Document command, @Nullable ReadPreference readPreference) { - Assert.notNull(command, "Command must not be null!"); + Assert.notNull(command, "Command must not be null"); return createFlux(db -> readPreference != null ? 
db.runCommand(command, readPreference, Document.class) : db.runCommand(command, Document.class)).next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.Class, org.springframework.data.mongodb.core.ReactiveCollectionCallback) - */ @Override public Flux execute(Class entityClass, ReactiveCollectionCallback action) { - return createFlux(determineCollectionName(entityClass), action); + return createFlux(getCollectionName(entityClass), action); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(org.springframework.data.mongodb.core.ReactiveDbCallback) - */ @Override public Flux execute(ReactiveDatabaseCallback action) { return createFlux(action); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#execute(java.lang.String, org.springframework.data.mongodb.core.ReactiveCollectionCallback) - */ + @Override public Flux execute(String collectionName, ReactiveCollectionCallback callback) { - Assert.notNull(callback, "ReactiveCollectionCallback must not be null!"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); return createFlux(collectionName, callback); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#withSession(org.reactivestreams.Publisher, java.util.function.Consumer) - */ @Override public ReactiveSessionScoped withSession(Publisher sessionProvider) { @@ -464,44 +559,15 @@ public Flux execute(ReactiveSessionCallback action, Consumer + * NOTE: MongoDB transactions require at least MongoDB 4.0. 
+ * + * @since 2.2 */ - @Override - public ReactiveSessionScoped inTransaction(Publisher sessionProvider) { - - Mono cachedSession = Mono.from(sessionProvider).cache(); - - return new ReactiveSessionScoped() { - - @Override - public Flux execute(ReactiveSessionCallback action, Consumer doFinally) { - - return cachedSession.flatMapMany(session -> { - - if (!session.hasActiveTransaction()) { - session.startTransaction(); - } - - return Flux.usingWhen(Mono.just(session), // - s -> ReactiveMongoTemplate.this.withSession(action, s), // - ClientSession::commitTransaction, // - ClientSession::abortTransaction) // - .doFinally(signalType -> doFinally.accept(session)); - }); - } - }; + public void setSessionSynchronization(SessionSynchronization sessionSynchronization) { + this.sessionSynchronization = sessionSynchronization; } private Flux withSession(ReactiveSessionCallback action, ClientSession session) { @@ -510,21 +576,14 @@ private Flux withSession(ReactiveSessionCallback action, ClientSession ReactiveMongoTemplate.this); return Flux.from(action.doInSession(operations)) // - .subscriberContext(ctx -> ReactiveMongoContext.setSession(ctx, Mono.just(session))); + .contextWrite(ctx -> ReactiveMongoContext.setSession(ctx, Mono.just(session))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#withSession(com.mongodb.session.ClientSession) - */ + @Override public ReactiveMongoOperations withSession(ClientSession session) { return new ReactiveSessionBoundMongoTemplate(session, ReactiveMongoTemplate.this); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#withSession(com.mongodb.ClientSessionOptions) - */ @Override public ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions) { return withSession(mongoDatabaseFactory.getSession(sessionOptions)); @@ -539,9 +598,10 @@ public ReactiveSessionScoped withSession(ClientSessionOptions sessionOptions) { */ public Flux 
createFlux(ReactiveDatabaseCallback callback) { - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); - return Flux.defer(() -> callback.doInDB(prepareDatabase(doGetDatabase()))).onErrorMap(translateException()); + return Mono.defer(this::doGetDatabase).flatMapMany(database -> callback.doInDB(prepareDatabase(database))) + .onErrorMap(translateException()); } /** @@ -551,11 +611,11 @@ public Flux createFlux(ReactiveDatabaseCallback callback) { * @param callback must not be {@literal null} * @return a {@link Mono} wrapping the {@link ReactiveDatabaseCallback}. */ - public Mono createMono(final ReactiveDatabaseCallback callback) { + public Mono createMono(ReactiveDatabaseCallback callback) { - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); - return Mono.defer(() -> Mono.from(callback.doInDB(prepareDatabase(doGetDatabase())))) + return Mono.defer(this::doGetDatabase).flatMap(database -> Mono.from(callback.doInDB(prepareDatabase(database)))) .onErrorMap(translateException()); } @@ -568,11 +628,11 @@ public Mono createMono(final ReactiveDatabaseCallback callback) { */ public Flux createFlux(String collectionName, ReactiveCollectionCallback callback) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(callback, "ReactiveDatabaseCallback must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveDatabaseCallback must not be null"); - Mono> collectionPublisher = Mono - .fromCallable(() -> getAndPrepareCollection(doGetDatabase(), collectionName)); + Mono> collectionPublisher = doGetDatabase() + .map(database -> getAndPrepareCollection(database, collectionName)); return collectionPublisher.flatMapMany(callback::doInCollection).onErrorMap(translateException()); } @@ 
-587,91 +647,113 @@ public Flux createFlux(String collectionName, ReactiveCollectionCallback< */ public Mono createMono(String collectionName, ReactiveCollectionCallback callback) { - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(callback, "ReactiveCollectionCallback must not be null!"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(callback, "ReactiveCollectionCallback must not be null"); - Mono> collectionPublisher = Mono - .fromCallable(() -> getAndPrepareCollection(doGetDatabase(), collectionName)); + Mono> collectionPublisher = doGetDatabase() + .map(database -> getAndPrepareCollection(database, collectionName)); return collectionPublisher.flatMap(collection -> Mono.from(callback.doInCollection(collection))) .onErrorMap(translateException()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class) - */ + @Override public Mono> createCollection(Class entityClass) { - return createCollection(determineCollectionName(entityClass)); + return createCollection(entityClass, operations.forType(entityClass).getCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.Class, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public Mono> createCollection(Class entityClass, @Nullable CollectionOptions collectionOptions) { - return doCreateCollection(determineCollectionName(entityClass), - convertToCreateCollectionOptions(collectionOptions, entityClass)); + + Assert.notNull(entityClass, "EntityClass must not be null"); + + CollectionOptions options = collectionOptions != null ? 
collectionOptions : CollectionOptions.empty(); + options = Optionals + .firstNonEmpty(() -> Optional.ofNullable(collectionOptions).flatMap(CollectionOptions::getCollation), + () -> operations.forType(entityClass).getCollation()) // + .map(options::collation).orElse(options); + + return doCreateCollection(getCollectionName(entityClass), convertToCreateCollectionOptions(options, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String) - */ + @Override public Mono> createCollection(String collectionName) { return doCreateCollection(collectionName, new CreateCollectionOptions()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#createCollection(java.lang.String, org.springframework.data.mongodb.core.CollectionOptions) - */ + @Override public Mono> createCollection(String collectionName, @Nullable CollectionOptions collectionOptions) { return doCreateCollection(collectionName, convertToCreateCollectionOptions(collectionOptions)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollection(java.lang.String) - */ - public MongoCollection getCollection(final String collectionName) { - return execute((MongoDatabaseCallback>) db -> db.getCollection(collectionName)); + @Override + public Mono> createView(String name, Class source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, getCollectionName(source), + queryOperations.createAggregation(Aggregation.newAggregation(source, pipeline.getOperations()), source), + options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.Class) - */ + @Override + public Mono> createView(String name, String source, AggregationPipeline pipeline, + @Nullable ViewOptions options) { + + return createView(name, source, + 
queryOperations.createAggregation(Aggregation.newAggregation(pipeline.getOperations()), (Class) null), + options); + } + + private Mono> createView(String name, String source, AggregationDefinition aggregation, + @Nullable ViewOptions options) { + return doCreateView(name, source, aggregation.getAggregationPipeline(), options); + } + + protected Mono> doCreateView(String name, String source, List pipeline, + @Nullable ViewOptions options) { + + CreateViewOptions viewOptions = new CreateViewOptions(); + if (options != null) { + options.getCollation().map(Collation::toMongoCollation).ifPresent(viewOptions::collation); + } + + return execute(db -> { + return Flux.from(db.createView(name, source, pipeline, viewOptions)) + .then(Mono.fromSupplier(() -> db.getCollection(name))); + }).next(); + } + + @Override + public Mono> getCollection(String collectionName) { + + Assert.notNull(collectionName, "Collection name must not be null"); + + return createMono(db -> Mono.just(db.getCollection(collectionName))); + } + + @Override public Mono collectionExists(Class entityClass) { - return collectionExists(determineCollectionName(entityClass)); + return collectionExists(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#collectionExists(java.lang.String) - */ - public Mono collectionExists(final String collectionName) { - return createMono(db -> Flux.from(db.listCollectionNames()) // - .filter(s -> s.equals(collectionName)) // - .map(s -> true) // - .single(false)); + @Override + public Mono collectionExists(String collectionName) { + return createMono( + db -> Flux.from(db.listCollectionNames()) // + .filter(s -> s.equals(collectionName)) // + .map(s -> true) // + .single(false)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.Class) - */ + @Override public Mono dropCollection(Class entityClass) { - return 
dropCollection(determineCollectionName(entityClass)); + return dropCollection(getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#dropCollection(java.lang.String) - */ - public Mono dropCollection(final String collectionName) { + @Override + public Mono dropCollection(String collectionName) { return createMono(collectionName, MongoCollection::drop).doOnSuccess(success -> { if (LOGGER.isDebugEnabled()) { @@ -680,66 +762,73 @@ public Mono dropCollection(final String collectionName) { }).then(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#getCollectionNames() - */ - public Flux getCollectionNames() { - return createFlux(MongoDatabase::listCollectionNames); + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, String collectionName) { + return bulkOps(mode, null, collectionName); } - public MongoDatabase getMongoDatabase() { - return doGetDatabase(); + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, Class entityClass) { + return bulkOps(mode, entityClass, getCollectionName(entityClass)); } - protected MongoDatabase doGetDatabase() { + @Override + public ReactiveBulkOperations bulkOps(BulkMode mode, @Nullable Class entityType, String collectionName) { + + Assert.notNull(mode, "BulkMode must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + + DefaultReactiveBulkOperations operations = new DefaultReactiveBulkOperations(this, collectionName, + new ReactiveBulkOperationContext(mode, Optional.ofNullable(getPersistentEntity(entityType)), queryMapper, + updateMapper, eventPublisher, entityCallbacks)); + + operations.setDefaultWriteConcern(writeConcern); + + return operations; + } + + @Override + public Flux getCollectionNames() { + return createFlux(db -> db.listCollectionNames()); + } + + public Mono getMongoDatabase() { return mongoDatabaseFactory.getMongoDatabase(); } - /* 
- * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + protected Mono doGetDatabase() { + return ReactiveMongoDatabaseUtils.getDatabase(mongoDatabaseFactory, sessionSynchronization); + } + + @Override public Mono findOne(Query query, Class entityClass) { - return findOne(query, entityClass, determineCollectionName(entityClass)); + return findOne(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findOne(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono findOne(Query query, Class entityClass, String collectionName) { if (ObjectUtils.isEmpty(query.getSortObject())) { - return doFindOne(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, - query.getCollation().orElse(null)); + return doFindOne(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); } query.limit(1); return find(query, entityClass, collectionName).next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono exists(Query query, Class entityClass) { - return exists(query, entityClass, determineCollectionName(entityClass)); + return exists(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ + @Override public Mono exists(Query query, String collectionName) { return exists(query, null, collectionName); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#exists(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ - public Mono exists(final Query query, @Nullable Class entityClass, String collectionName) { + @Override + public Mono exists(Query query, @Nullable Class entityClass, String collectionName) { if (query == null) { throw new InvalidDataAccessApiUsageException("Query passed in to exist can't be null"); @@ -747,99 +836,142 @@ public Mono exists(final Query query, @Nullable Class entityClass, S return createFlux(collectionName, collection -> { - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), getPersistentEntity(entityClass)); - FindPublisher findPublisher = collection.find(mappedQuery, Document.class) - .projection(new Document("_id", 1)); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + QueryContext queryContext = queryOperations.createQueryContext(query); + Document filter = queryContext.getMappedQuery(entityClass, this::getPersistentEntity); - findPublisher = query.getCollation().map(Collation::toMongoCollation).map(findPublisher::collation) - .orElse(findPublisher); + FindPublisher findPublisher = collectionPreparer.prepare(collection).find(filter, Document.class) + .projection(new Document(FieldName.ID.name(), 1)); + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("exists: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + queryContext.applyCollation(entityClass, findPublisher::collation); return findPublisher.limit(1); }).hasElements(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Flux find(Query query, Class entityClass) { - return find(query, entityClass, determineCollectionName(entityClass)); + return find(query, entityClass, 
getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#find(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Flux find(@Nullable Query query, Class entityClass, String collectionName) { if (query == null) { return findAll(entityClass, collectionName); } - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, - new QueryFindPublisherPreparer(query, entityClass)); + return doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, new QueryFindPublisherPreparer(query, entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class) - */ + @Override + public Mono> scroll(Query query, Class entityType) { + + Assert.notNull(entityType, "Entity type must not be null"); + + return scroll(query, entityType, getCollectionName(entityType)); + } + + @Override + public Mono> scroll(Query query, Class entityType, String collectionName) { + return doScroll(query, entityType, entityType, QueryResultConverter.entity(), collectionName); + } + + Mono> doScroll(Query query, Class sourceClass, Class targetClass, + QueryResultConverter resultConverter, String collectionName) { + + Assert.notNull(query, "Query must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(sourceClass, "Entity type must not be null"); + Assert.notNull(targetClass, "Target type must not be null"); + + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); + int limit = query.isLimited() ? 
query.getLimit() + 1 : Integer.MAX_VALUE; + + if (query.hasKeyset()) { + + KeysetScrollQuery keysetPaginationQuery = ScrollUtils.createKeysetPaginationQuery(query, + operations.getIdPropertyName(sourceClass)); + + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), + keysetPaginationQuery.query(), keysetPaginationQuery.fields(), sourceClass, + new QueryFindPublisherPreparer(query, keysetPaginationQuery.sort(), limit, 0, sourceClass), callback) + .collectList(); + + return result.map(it -> ScrollUtils.createWindow(query, it, sourceClass, operations)); + } + + Mono> result = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), sourceClass, + new QueryFindPublisherPreparer(query, query.getSortObject(), limit, query.getSkip(), sourceClass), callback) + .collectList(); + + return result.map( + it -> ScrollUtils.createWindow(it, query.getLimit(), OffsetScrollPosition.positionFunction(query.getSkip()))); + } + + @Override public Mono findById(Object id, Class entityClass) { - return findById(id, entityClass, determineCollectionName(entityClass)); + return findById(id, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findById(java.lang.Object, java.lang.Class, java.lang.String) - */ + @Override public Mono findById(Object id, Class entityClass, String collectionName) { String idKey = operations.getIdPropertyName(entityClass); - return doFindOne(collectionName, new Document(idKey, id), null, entityClass, null); + return doFindOne(collectionName, CollectionPreparer.identity(), new Document(idKey, id), null, entityClass, + (Collation) null); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.Class, java.lang.Class) - */ + @Override public Flux 
findDistinct(Query query, String field, Class entityClass, Class resultClass) { - return findDistinct(query, field, determineCollectionName(entityClass), entityClass, resultClass); + return findDistinct(query, field, getCollectionName(entityClass), entityClass, resultClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findDistinct(org.springframework.data.mongodb.core.query.Query, java.lang.String, java.lang.String, java.lang.Class, java.lang.Class) - */ + @Override @SuppressWarnings("unchecked") public Flux findDistinct(Query query, String field, String collectionName, Class entityClass, Class resultClass) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(field, "Field must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(entityClass, "EntityClass must not be null!"); - Assert.notNull(resultClass, "ResultClass must not be null!"); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(field, "Field must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(entityClass, "EntityClass must not be null"); + Assert.notNull(resultClass, "ResultClass must not be null"); MongoPersistentEntity entity = getPersistentEntity(entityClass); + DistinctQueryContext distinctQueryContext = queryOperations.distinctQueryContext(query, field); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity); - String mappedFieldName = queryMapper.getMappedFields(new Document(field, 1), entity).keySet().iterator().next(); - - Class mongoDriverCompatibleType = mongoDatabaseFactory.getCodecFor(resultClass) // - .map(Codec::getEncoderClass) // - .orElse((Class) BsonValue.class); + Document mappedQuery = distinctQueryContext.getMappedQuery(entity); + String mappedFieldName = distinctQueryContext.getMappedFieldName(entity); + Class mongoDriverCompatibleType = 
distinctQueryContext.getDriverCompatibleClass(resultClass); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); Flux result = execute(collectionName, collection -> { - DistinctPublisher publisher = collection.distinct(mappedFieldName, mappedQuery, mongoDriverCompatibleType); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Executing findDistinct using query %s for field: %s in collection: %s", + serializeToJsonSafely(mappedQuery), field, collectionName)); + } - return query.getCollation().map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); + FindPublisherPreparer preparer = new QueryFindPublisherPreparer(query, entityClass); + + DistinctPublisher publisher = collectionPreparer.prepare(collection).distinct(mappedFieldName, mappedQuery, + mongoDriverCompatibleType); + distinctQueryContext.applyCollation(entityClass, publisher::collation); + return publisher; }); if (resultClass == Object.class || mongoDriverCompatibleType != resultClass) { - Class targetType = getMostSpecificConversionTargetType(resultClass, entityClass, field); + Class targetType = distinctQueryContext.getMostSpecificConversionTargetType(resultClass, entityClass); MongoConverter converter = getConverter(); result = result.map(it -> converter.mapValueToTargetType(it, targetType, NO_OP_REF_RESOLVER)); @@ -848,574 +980,595 @@ public Flux findDistinct(Query query, String field, String collectionName return (Flux) result; } - /** - * @param userType must not be {@literal null}. - * @param domainType must not be {@literal null}. - * @param field must not be {@literal null}. - * @return the most specific conversion target type depending on user preference and domain type property. 
- * @since 2.1 - */ - private static Class getMostSpecificConversionTargetType(Class userType, Class domainType, String field) { - - Class conversionTargetType = userType; - try { - - Class propertyType = PropertyPath.from(field, domainType).getLeafProperty().getLeafType(); - - // use the more specific type but favor UserType over property one - if (ClassUtils.isAssignable(userType, propertyType)) { - conversionTargetType = propertyType; - } - - } catch (PropertyReferenceException e) { - // just don't care about it as we default to Object.class anyway. - } - - return conversionTargetType; - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.String, java.lang.Class) - */ @Override public Flux aggregate(TypedAggregation aggregation, String inputCollectionName, Class outputType) { - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - - AggregationOperationContext context = new TypeBasedAggregationOperationContext(aggregation.getInputType(), - mappingContext, queryMapper); - return aggregate(aggregation, inputCollectionName, outputType, context); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return doAggregate(aggregation, inputCollectionName, aggregation.getInputType(), outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.TypedAggregation, java.lang.Class) - */ @Override public Flux aggregate(TypedAggregation aggregation, Class outputType) { - return aggregate(aggregation, determineCollectionName(aggregation.getInputType()), outputType); + + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + return aggregate(aggregation, getCollectionName(aggregation.getInputType()), outputType); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.Class, java.lang.Class) - */ @Override public Flux aggregate(Aggregation aggregation, Class inputType, Class outputType) { - - return aggregate(aggregation, determineCollectionName(inputType), outputType, - new TypeBasedAggregationOperationContext(inputType, mappingContext, queryMapper)); + return doAggregate(aggregation, getCollectionName(inputType), inputType, outputType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#aggregate(org.springframework.data.mongodb.core.aggregation.Aggregation, java.lang.String, java.lang.Class) - */ @Override public Flux aggregate(Aggregation aggregation, String collectionName, Class outputType) { - return aggregate(aggregation, collectionName, outputType, null); + return doAggregate(aggregation, collectionName, null, outputType); } - /** - * @param aggregation must not be {@literal null}. - * @param collectionName must not be {@literal null}. - * @param outputType must not be {@literal null}. - * @param context can be {@literal null} and will be defaulted to {@link Aggregation#DEFAULT_CONTEXT}. - * @return never {@literal null}. 
- */ - protected Flux aggregate(Aggregation aggregation, String collectionName, Class outputType, - @Nullable AggregationOperationContext context) { + protected Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType) { + return doAggregate(aggregation, collectionName, inputType, outputType, QueryResultConverter.entity()); + } - Assert.notNull(aggregation, "Aggregation pipeline must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - Assert.notNull(outputType, "Output type must not be null!"); + Flux doAggregate(Aggregation aggregation, String collectionName, @Nullable Class inputType, + Class outputType, QueryResultConverter resultConverter) { - AggregationUtil aggregationUtil = new AggregationUtil(queryMapper, mappingContext); - AggregationOperationContext rootContext = aggregationUtil.prepareAggregationContext(aggregation, context); + Assert.notNull(aggregation, "Aggregation pipeline must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); + Assert.notNull(outputType, "Output type must not be null"); AggregationOptions options = aggregation.getOptions(); - List pipeline = aggregationUtil.createPipeline(aggregation, rootContext); + Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming"); - Assert.isTrue(!options.isExplain(), "Cannot use explain option with streaming!"); + AggregationDefinition ctx = queryOperations.createAggregation(aggregation, inputType); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Streaming aggregation: {} in collection {}", serializeToJsonSafely(pipeline), collectionName); + LOGGER.debug(String.format("Streaming aggregation: %s in collection %s", + serializeToJsonSafely(ctx.getAggregationPipeline()), collectionName)); } - ReadDocumentCallback readCallback = new ReadDocumentCallback<>(mongoConverter, outputType, collectionName); - return execute(collectionName, collection -> 
aggregateAndMap(collection, pipeline, options, readCallback)); + DocumentCallback readCallback = new QueryResultConverterCallback<>(resultConverter, + new ReadDocumentCallback<>(mongoConverter, outputType, collectionName)); + return execute(collectionName, collection -> aggregateAndMap(collection, ctx.getAggregationPipeline(), + ctx.isOutOrMerge(), options, readCallback, ctx.getInputType())); } private Flux aggregateAndMap(MongoCollection collection, List pipeline, - AggregationOptions options, ReadDocumentCallback readCallback) { + boolean isOutOrMerge, AggregationOptions options, DocumentCallback readCallback, + @Nullable Class inputType) { - AggregatePublisher cursor = collection.aggregate(pipeline, Document.class) - .allowDiskUse(options.isAllowDiskUse()); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(options); + AggregatePublisher cursor = collectionPreparer.prepare(collection).aggregate(pipeline, Document.class); + + if (options.isAllowDiskUseSet()) { + cursor = cursor.allowDiskUse(options.isAllowDiskUse()); + } if (options.getCursorBatchSize() != null) { cursor = cursor.batchSize(options.getCursorBatchSize()); } - if (options.getCollation().isPresent()) { - cursor = cursor.collation(options.getCollation().map(Collation::toMongoCollation).get()); + options.getComment().ifPresent(cursor::comment); + + HintFunction hintFunction = options.getHintObject().map(HintFunction::from).orElseGet(HintFunction::empty); + if (hintFunction.isPresent()) { + cursor = hintFunction.apply(mongoDatabaseFactory, cursor::hintString, cursor::hint); } - return Flux.from(cursor).map(readCallback::doWith); + Optionals.firstNonEmpty(options::getCollation, () -> operations.forType(inputType).getCollation()) // + .map(Collation::toMongoCollation) // + .ifPresent(cursor::collation); + + if (options.hasExecutionTimeLimit()) { + cursor = cursor.maxTime(options.getMaxTime().toMillis(), TimeUnit.MILLISECONDS); + } + + if 
(options.isSkipResults()) { + return (isOutOrMerge ? Flux.from(cursor.toCollection()) : Flux.from(cursor.first())).thenMany(Mono.empty()); + } + + return Flux.from(cursor).flatMapSequential(readCallback::doWith); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#geoNear(org.springframework.data.mongodb.core.query.NearQuery, java.lang.Class) - */ @Override public Flux> geoNear(NearQuery near, Class entityClass) { - return geoNear(near, entityClass, determineCollectionName(entityClass)); + return geoNear(near, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#geoNear(org.springframework.data.mongodb.core.query.NearQuery, java.lang.Class, java.lang.String) - */ @Override public Flux> geoNear(NearQuery near, Class entityClass, String collectionName) { return geoNear(near, entityClass, collectionName, entityClass); } - @SuppressWarnings("unchecked") protected Flux> geoNear(NearQuery near, Class entityClass, String collectionName, Class returnType) { + return doGeoNear(near, entityClass, collectionName, returnType, QueryResultConverter.entity()); + } + + @SuppressWarnings("unchecked") + Flux> doGeoNear(NearQuery near, Class entityClass, String collectionName, Class returnType, + QueryResultConverter resultConverter) { if (near == null) { - throw new InvalidDataAccessApiUsageException("NearQuery must not be null!"); + throw new InvalidDataAccessApiUsageException("NearQuery must not be null"); } if (entityClass == null) { - throw new InvalidDataAccessApiUsageException("Entity class must not be null!"); + throw new InvalidDataAccessApiUsageException("Entity class must not be null"); } - String collection = StringUtils.hasText(collectionName) ? collectionName : determineCollectionName(entityClass); - Document nearDbObject = near.toDocument(); + String collection = StringUtils.hasText(collectionName) ? 
collectionName : getCollectionName(entityClass); + String distanceField = operations.nearQueryDistanceFieldName(entityClass); + EntityProjection projection = operations.introspectProjection(returnType, entityClass); - Document command = new Document("geoNear", collection); - command.putAll(nearDbObject); + GeoNearResultDocumentCallback callback = new GeoNearResultDocumentCallback<>(distanceField, + getResultReader(projection, collectionName, resultConverter), near.getMetric()); - return Flux.defer(() -> { - - if (nearDbObject.containsKey("query")) { - Document query = (Document) nearDbObject.get("query"); - command.put("query", queryMapper.getMappedObject(query, getPersistentEntity(entityClass))); - } - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Executing geoNear using: {} for class: {} in collection: {}", serializeToJsonSafely(command), - entityClass, collectionName); - } - - GeoNearResultDbObjectCallback callback = new GeoNearResultDbObjectCallback<>( - new ProjectingReadCallback<>(mongoConverter, entityClass, returnType, collectionName), near.getMetric()); + Builder optionsBuilder = AggregationOptions.builder(); + if (near.hasReadPreference()) { + optionsBuilder.readPreference(near.getReadPreference()); + } - return executeCommand(command, this.readPreference).flatMapMany(document -> { + if (near.hasReadConcern()) { + optionsBuilder.readConcern(near.getReadConcern()); + } - List results = document.get("results", List.class); + optionsBuilder.collation(near.getCollation()); - return results == null ? Flux.empty() : Flux.fromIterable(results); + Aggregation $geoNear = TypedAggregation.newAggregation(entityClass, Aggregation.geoNear(near, distanceField)) + .withOptions(optionsBuilder.build()); - }).skip(near.getSkip() != null ? 
near.getSkip() : 0).map(callback::doWith); - }); + return aggregate($geoNear, collection, Document.class) // + .flatMapSequential(callback::doWith); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) - */ - public Mono findAndModify(Query query, Update update, Class entityClass) { - return findAndModify(query, update, new FindAndModifyOptions(), entityClass, determineCollectionName(entityClass)); + @Override + public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass) { + return findAndModify(query, update, new FindAndModifyOptions(), entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono findAndModify(Query query, Update update, Class entityClass, String collectionName) { + @Override + public Mono findAndModify(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return findAndModify(query, update, new FindAndModifyOptions(), entityClass, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class) - */ - public Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass) { - return findAndModify(query, update, options, entityClass, determineCollectionName(entityClass)); + @Override + public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass) { + return 
findAndModify(query, update, options, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndModify(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, org.springframework.data.mongodb.core.FindAndModifyOptions, java.lang.Class, java.lang.String) - */ - public Mono findAndModify(Query query, Update update, FindAndModifyOptions options, Class entityClass, - String collectionName) { + public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass, String collectionName, QueryResultConverter resultConverter) { + + Assert.notNull(options, "Options must not be null "); + Assert.notNull(entityClass, "Entity class must not be null"); FindAndModifyOptions optionsToUse = FindAndModifyOptions.of(options); Optionals.ifAllPresent(query.getCollation(), optionsToUse.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and FindAndModifyOptions define a collation. 
Please provide the collation only via one of the two."); + "Both Query and FindAndModifyOptions define a collation; Please provide the collation only via one of the two"); }); - query.getCollation().ifPresent(optionsToUse::collation); + if (!optionsToUse.getCollation().isPresent()) { + operations.forType(entityClass).getCollation(query).ifPresent(optionsToUse::collation); + } - return doFindAndModify(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), entityClass, update, optionsToUse); + return doFindAndModify(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), entityClass, update, optionsToUse, + resultConverter); + } + + @Override + public Mono findAndModify(Query query, UpdateDefinition update, FindAndModifyOptions options, + Class entityClass, String collectionName) { + return findAndModify(query, update, options, entityClass, collectionName, QueryResultConverter.entity()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndReplace(org.springframework.data.mongodb.core.query.Query, java.lang.Object, org.springframework.data.mongodb.core.FindAndReplaceOptions, java.lang.Class, java.lang.String, java.lang.Class) - */ @Override public Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, Class entityType, String collectionName, Class resultType) { + return findAndReplace(query, replacement, options, entityType, collectionName, resultType, + QueryResultConverter.entity()); + } + + @SuppressWarnings("NullAway") + public Mono findAndReplace(Query query, S replacement, FindAndReplaceOptions options, + Class entityType, String collectionName, Class resultType, + QueryResultConverter resultConverter) { - Assert.notNull(query, "Query must not be null!"); - Assert.notNull(replacement, "Replacement must not be null!"); - 
Assert.notNull(options, "Options must not be null! Use FindAndReplaceOptions#empty() instead."); - Assert.notNull(entityType, "Entity class must not be null!"); - Assert.notNull(collectionName, "CollectionName must not be null!"); - Assert.notNull(resultType, "ResultType must not be null! Use Object.class instead."); + Assert.notNull(query, "Query must not be null"); + Assert.notNull(replacement, "Replacement must not be null"); + Assert.notNull(options, "Options must not be null Use FindAndReplaceOptions#empty() instead"); + Assert.notNull(entityType, "Entity class must not be null"); + Assert.notNull(collectionName, "CollectionName must not be null"); + Assert.notNull(resultType, "ResultType must not be null Use Object.class instead"); - Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none!"); - Assert.isTrue(query.getSkip() <= 0, "Query must not define skip."); + Assert.isTrue(query.getLimit() <= 1, "Query must not define a limit other than 1 ore none"); + Assert.isTrue(query.getSkip() <= 0, "Query must not define skip"); MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + QueryContext queryContext = queryOperations.createQueryContext(query); + EntityProjection projection = operations.introspectProjection(resultType, entityType); - Document mappedQuery = queryMapper.getMappedObject(query.getQueryObject(), entity); - Document mappedFields = queryMapper.getMappedFields(query.getFieldsObject(), entity); - Document mappedSort = queryMapper.getMappedSort(query.getSortObject(), entity); + Document mappedQuery = queryContext.getMappedQuery(entity); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedSort = queryContext.getMappedSort(entity); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); - Document mappedReplacement = operations.forEntity(replacement).toMappedDocument(this.mongoConverter).getDocument(); + 
return Mono.defer(() -> { + + PersistableEntityModel pem = PersistableEntityModel.of(replacement, collectionName); + + maybeEmitEvent(new BeforeConvertEvent<>(pem.getSource(), pem.getCollection())); + + return maybeCallBeforeConvert(pem.getSource(), pem.getCollection()).map(pem::mutate).flatMap(it -> { + PersistableEntityModel mapped = it + .addTargetDocument(operations.forEntity(it.getSource()).toMappedDocument(mongoConverter).getDocument()); + maybeEmitEvent(new BeforeSaveEvent(mapped.getSource(), mapped.getTarget(), mapped.getCollection())); - return doFindAndReplace(collectionName, mappedQuery, mappedFields, mappedSort, - query.getCollation().map(Collation::toMongoCollation).orElse(null), entityType, mappedReplacement, options, - resultType); + return maybeCallBeforeSave(it.getSource(), mapped.getTarget(), mapped.getCollection()) + .map(potentiallyModified -> PersistableEntityModel.of(potentiallyModified, mapped.getTarget(), + mapped.getCollection())); + }).flatMap(it -> { + + Mono afterFindAndReplace = doFindAndReplace(it.getCollection(), collectionPreparer, mappedQuery, + mappedFields, mappedSort, queryContext.getCollation(entityType).orElse(null), entityType, it.getTarget(), + options, projection, resultConverter); + return afterFindAndReplace.flatMap(saved -> { + maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), it.getCollection())); + return maybeCallAfterSave(saved, it.getTarget(), it.getCollection()); + }); + }); + }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono findAndRemove(Query query, Class entityClass) { - return findAndRemove(query, entityClass, determineCollectionName(entityClass)); + return findAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#findAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono findAndRemove(Query query, Class entityClass, String collectionName) { - return doFindAndRemove(collectionName, query.getQueryObject(), query.getFieldsObject(), - getMappedSortObject(query, entityClass), query.getCollation().orElse(null), entityClass); + operations.forType(entityClass).getCollation(query); + return doFindAndRemove(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), getMappedSortObject(query, entityClass), + operations.forType(entityClass).getCollation(query).orElse(null), entityClass); } /* * (non-Javadoc) * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class) */ + @Override public Mono count(Query query, Class entityClass) { - Assert.notNull(entityClass, "Entity class must not be null!"); + Assert.notNull(entityClass, "Entity class must not be null"); - return count(query, entityClass, determineCollectionName(entityClass)); + return count(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ - public Mono count(final Query query, String collectionName) { + @Override + public Mono count(Query query, String collectionName) { return count(query, null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono count(Query query, @Nullable Class entityClass, String collectionName) { - Assert.notNull(query, "Query must not be null!"); - Assert.hasText(collectionName, 
"Collection name must not be null or empty!"); + Assert.notNull(query, "Query must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); return createMono(collectionName, collection -> { - final Document Document = query == null ? null - : queryMapper.getMappedObject(query.getQueryObject(), - entityClass == null ? null : mappingContext.getPersistentEntity(entityClass)); + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document filter = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); - CountOptions options = new CountOptions(); - if (query != null) { - query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug( + String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); } - return collection.count(Document, options); + return doCount(collectionName, filter, options); }); } + /** + * Run the actual count operation against the collection with given name. + * + * @param collectionName the name of the collection to count matching documents in. + * @param filter the filter to apply. Must not be {@literal null}. + * @param options options to apply. Like collation and the such. 
+ * @return + */ + protected Mono doCount(String collectionName, Document filter, CountOptions options) { + + if (LOGGER.isDebugEnabled()) { + LOGGER + .debug(String.format("Executing count: %s in collection: %s", serializeToJsonSafely(filter), collectionName)); + } + + return countExecution.countDocuments(collectionName, filter, options); + } + /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#estimatedCount(java.lang.String) */ @Override - public Mono insert(Mono objectToSave) { + public Mono estimatedCount(String collectionName) { + return doEstimatedCount(collectionName, new EstimatedDocumentCountOptions()); + } - Assert.notNull(objectToSave, "Mono to insert must not be null!"); + protected Mono doEstimatedCount(String collectionName, EstimatedDocumentCountOptions options) { + return createMono(collectionName, collection -> collection.estimatedDocumentCount(options)); + } - return objectToSave.flatMap(this::insert); + @Override + public Mono exactCount(Query query, @Nullable Class entityClass, String collectionName) { + + CountContext countContext = queryOperations.countQueryContext(query); + + CountOptions options = countContext.getCountOptions(entityClass); + Document mappedQuery = countContext.getMappedQuery(entityClass, mappingContext::getPersistentEntity); + + return doExactCount(collectionName, mappedQuery, options); + } + + protected Mono doExactCount(String collectionName, Document filter, CountOptions options) { + + return createMono(collectionName, + collection -> collection.countDocuments(CountQuery.of(filter).toQueryDocument(), options)); + } + + protected Mono countCanBeEstimated(Document filter, CountOptions options) { + + if (!filter.isEmpty() || !isEmptyOptions(options)) { + return Mono.just(false); + } + return ReactiveMongoDatabaseUtils.isTransactionActive(getMongoDatabaseFactory()).map(it -> !it); 
+ } + + private boolean isEmptyOptions(CountOptions options) { + return options.getLimit() <= 0 && options.getSkip() <= 0; } /* * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono, java.lang.Class) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono) */ + @Override + public Mono insert(Mono objectToSave) { + + Assert.notNull(objectToSave, "Mono to insert must not be null"); + + return objectToSave.flatMap(this::insert); + } + @Override public Flux insertAll(Mono> batchToSave, Class entityClass) { - return insertAll(batchToSave, determineCollectionName(entityClass)); + return insertAll(batchToSave, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Flux insertAll(Mono> batchToSave, String collectionName) { - Assert.notNull(batchToSave, "Batch to insert must not be null!"); + Assert.notNull(batchToSave, "Batch to insert must not be null"); - return Flux.from(batchToSave).flatMap(collection -> insert(collection, collectionName)); + return Flux.from(batchToSave).flatMapSequential(collection -> insert(collection, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object) - */ + @Override public Mono insert(T objectToSave) { - Assert.notNull(objectToSave, "Object to insert must not be null!"); + Assert.notNull(objectToSave, "Object to insert must not be null"); - ensureNotIterable(objectToSave); - return insert(objectToSave, determineEntityCollectionName(objectToSave)); + ensureNotCollectionLike(objectToSave); + return insert(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.lang.Object, 
java.lang.String) - */ + @Override public Mono insert(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Object to insert must not be null!"); + Assert.notNull(objectToSave, "Object to insert must not be null"); - ensureNotIterable(objectToSave); + ensureNotCollectionLike(objectToSave); return doInsert(collectionName, objectToSave, this.mongoConverter); } + @SuppressWarnings("NullAway") protected Mono doInsert(String collectionName, T objectToSave, MongoWriter writer) { - assertUpdateableIdIfNotSet(objectToSave); - - return Mono.defer(() -> { - - AdaptibleEntity entity = operations.forEntity(objectToSave, mongoConverter.getConversionService()); - T toSave = entity.initializeVersionProperty(); + return Mono.just(PersistableEntityModel.of(objectToSave, collectionName)) // + .doOnNext(it -> maybeEmitEvent(new BeforeConvertEvent<>(it.getSource(), it.getCollection()))) // + .flatMap(it -> maybeCallBeforeConvert(it.getSource(), it.getCollection()).map(it::mutate)) // + .map(it -> { - maybeEmitEvent(new BeforeConvertEvent<>(toSave, collectionName)); + AdaptibleEntity entity = operations.forEntity(it.getSource(), mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); - Document dbDoc = entity.toMappedDocument(writer).getDocument(); + PersistableEntityModel model = PersistableEntityModel.of(entity.initializeVersionProperty(), + entity.toMappedDocument(writer).getDocument(), it.getCollection()); - maybeEmitEvent(new BeforeSaveEvent<>(toSave, dbDoc, collectionName)); + maybeEmitEvent(new BeforeSaveEvent<>(model.getSource(), model.getTarget(), model.getCollection())); + return model; + })// + .flatMap(it -> { + return maybeCallBeforeSave(it.getSource(), it.getTarget(), it.getCollection()).map(it::mutate); + }).flatMap(it -> { - Mono afterInsert = insertDBObject(collectionName, dbDoc, toSave.getClass()).map(id -> { - - T saved = entity.populateIdIfNecessary(id); - maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); - 
return saved; - }); + return insertDocument(it.getCollection(), it.getTarget(), it.getSource().getClass()).flatMap(id -> { - return afterInsert; - }); + T saved = operations.forEntity(it.getSource(), mongoConverter.getConversionService()) + .populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, it.getTarget(), collectionName)); + return maybeCallAfterSave(saved, it.getTarget(), collectionName); + }); + }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.Class) - */ + @Override public Flux insert(Collection batchToSave, Class entityClass) { - return doInsertBatch(determineCollectionName(entityClass), batchToSave, this.mongoConverter); + return doInsertBatch(getCollectionName(entityClass), batchToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insert(java.util.Collection, java.lang.String) - */ + @Override public Flux insert(Collection batchToSave, String collectionName) { return doInsertBatch(collectionName, batchToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(java.util.Collection) - */ + @Override public Flux insertAll(Collection objectsToSave) { return doInsertAll(objectsToSave, this.mongoConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#insertAll(reactor.core.publisher.Mono) - */ @Override public Flux insertAll(Mono> objectsToSave) { - return Flux.from(objectsToSave).flatMap(this::insertAll); + return Flux.from(objectsToSave).flatMapSequential(this::insertAll); } + @SuppressWarnings("NullAway") protected Flux doInsertAll(Collection listToSave, MongoWriter writer) { - final Map> elementsByCollection = new HashMap<>(); + Map> elementsByCollection = new HashMap<>(); listToSave.forEach(element -> { - MongoPersistentEntity entity = 
mappingContext.getRequiredPersistentEntity(element.getClass()); - - String collection = entity.getCollection(); + String collection = getCollectionName(element.getClass()); List collectionElements = elementsByCollection.computeIfAbsent(collection, k -> new ArrayList<>()); collectionElements.add(element); }); return Flux.fromIterable(elementsByCollection.keySet()) - .flatMap(collectionName -> doInsertBatch(collectionName, elementsByCollection.get(collectionName), writer)); + .concatMap(collectionName -> doInsertBatch(collectionName, elementsByCollection.get(collectionName), writer)); } - protected Flux doInsertBatch(final String collectionName, final Collection batchToSave, - final MongoWriter writer) { + @SuppressWarnings("NullAway") + protected Flux doInsertBatch(String collectionName, Collection batchToSave, + MongoWriter writer) { - Assert.notNull(writer, "MongoWriter must not be null!"); + Assert.notNull(writer, "MongoWriter must not be null"); - Mono, Document>>> prepareDocuments = Flux.fromIterable(batchToSave).map(o -> { + Mono, Document>>> prepareDocuments = Flux.fromIterable(batchToSave) + .flatMap(uninitialized -> { - AdaptibleEntity entity = operations.forEntity(o, mongoConverter.getConversionService()); - T toSave = entity.initializeVersionProperty(); + BeforeConvertEvent event = new BeforeConvertEvent<>(uninitialized, collectionName); + T toConvert = maybeEmitEvent(event).getSource(); - BeforeConvertEvent event = new BeforeConvertEvent<>(toSave, collectionName); - toSave = maybeEmitEvent(event).getSource(); + return maybeCallBeforeConvert(toConvert, collectionName).flatMap(it -> { + + AdaptibleEntity entity = operations.forEntity(it, mongoConverter.getConversionService()); + entity.assertUpdateableIdIfNotSet(); + + T initialized = entity.initializeVersionProperty(); + MappedDocument mapped = entity.toMappedDocument(writer); - Document dbDoc = entity.toMappedDocument(writer).getDocument(); + maybeEmitEvent(new BeforeSaveEvent<>(initialized, 
mapped.getDocument(), collectionName)); + return maybeCallBeforeSave(initialized, mapped.getDocument(), collectionName).map(toSave -> { - maybeEmitEvent(new BeforeSaveEvent<>(toSave, dbDoc, collectionName)); - return Tuples.of(entity, dbDoc); - }).collectList(); + MappedDocument mappedDocument = queryOperations.createInsertContext(mapped) + .prepareId(uninitialized.getClass()); + + return Tuples.of(entity, mappedDocument.getDocument()); + }); + }); + }).collectList(); Flux, Document>> insertDocuments = prepareDocuments.flatMapMany(tuples -> { - List dbObjects = tuples.stream().map(Tuple2::getT2).collect(Collectors.toList()); + List documents = tuples.stream().map(Tuple2::getT2).collect(Collectors.toList()); - return insertDocumentList(collectionName, dbObjects).thenMany(Flux.fromIterable(tuples)); + return insertDocumentList(collectionName, documents).thenMany(Flux.fromIterable(tuples)); }); - return insertDocuments.map(tuple -> { + return insertDocuments.flatMapSequential(tuple -> { - Object id = MappedDocument.of(tuple.getT2()).getId(); + Document document = tuple.getT2(); + Object id = MappedDocument.of(document).getId(); T saved = tuple.getT1().populateIdIfNecessary(id); - maybeEmitEvent(new AfterSaveEvent<>(saved, tuple.getT2(), collectionName)); - return saved; + maybeEmitEvent(new AfterSaveEvent<>(saved, document, collectionName)); + return maybeCallAfterSave(saved, document, collectionName); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(reactor.core.publisher.Mono) - */ @Override public Mono save(Mono objectToSave) { - Assert.notNull(objectToSave, "Mono to save must not be null!"); + Assert.notNull(objectToSave, "Mono to save must not be null"); return objectToSave.flatMap(this::save); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(reactor.core.publisher.Mono, java.lang.String) - */ + } + @Override public Mono save(Mono objectToSave, String 
collectionName) { - Assert.notNull(objectToSave, "Mono to save must not be null!"); + Assert.notNull(objectToSave, "Mono to save must not be null"); return objectToSave.flatMap(o -> save(o, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object) - */ + @Override public Mono save(T objectToSave) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - return save(objectToSave, determineEntityCollectionName(objectToSave)); + Assert.notNull(objectToSave, "Object to save must not be null"); + return save(objectToSave, getCollectionName(ClassUtils.getUserClass(objectToSave))); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#save(java.lang.Object, java.lang.String) - */ + @Override public Mono save(T objectToSave, String collectionName) { - Assert.notNull(objectToSave, "Object to save must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(objectToSave, "Object to save must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - MongoPersistentEntity mongoPersistentEntity = getPersistentEntity(objectToSave.getClass()); - - // No optimistic locking -> simple save - if (mongoPersistentEntity == null || !mongoPersistentEntity.hasVersionProperty()) { - return doSave(collectionName, objectToSave, this.mongoConverter); - } + AdaptibleEntity source = operations.forEntity(objectToSave, mongoConverter.getConversionService()); - return doSaveVersioned(objectToSave, mongoPersistentEntity, collectionName); + return source.isVersionedEntity() ? 
doSaveVersioned(source, collectionName) + : doSave(collectionName, objectToSave, this.mongoConverter); } - private Mono doSaveVersioned(T objectToSave, MongoPersistentEntity entity, String collectionName) { + private Mono doSaveVersioned(AdaptibleEntity source, String collectionName) { - AdaptibleEntity forEntity = operations.forEntity(objectToSave, mongoConverter.getConversionService()); + if (source.isNew()) { + return doInsert(collectionName, source.getBean(), this.mongoConverter); + } return createMono(collectionName, collection -> { - Number versionNumber = forEntity.getVersion(); - - // Fresh instance -> initialize version property - if (versionNumber == null) { - return doInsert(collectionName, objectToSave, mongoConverter); - } - - forEntity.assertUpdateableIdIfNotSet(); + // Create query for entity with the id and old version + Query query = source.getQueryForVersion(); - Query query = forEntity.getQueryForVersion(); + // Bump version number + T toSave = source.incrementVersion(); - T toSave = forEntity.incrementVersion(); + source.assertUpdateableIdIfNotSet(); BeforeConvertEvent event = new BeforeConvertEvent<>(toSave, collectionName); - T afterEvent = ReactiveMongoTemplate.this.maybeEmitEvent(event).getSource(); + T afterEvent = maybeEmitEvent(event).getSource(); - MappedDocument mapped = operations.forEntity(toSave).toMappedDocument(mongoConverter); - Document document = mapped.getDocument(); + return maybeCallBeforeConvert(afterEvent, collectionName).flatMap(toConvert -> { - ReactiveMongoTemplate.this.maybeEmitEvent(new BeforeSaveEvent<>(afterEvent, document, collectionName)); + MappedDocument mapped = operations.forEntity(toConvert).toMappedDocument(mongoConverter); + Document document = mapped.getDocument(); - return doUpdate(collectionName, query, mapped.updateWithoutId(), afterEvent.getClass(), false, false) - .map(updateResult -> maybeEmitEvent(new AfterSaveEvent(afterEvent, document, collectionName)).getSource()); + maybeEmitEvent(new 
BeforeSaveEvent<>(toConvert, document, collectionName)); + return maybeCallBeforeSave(toConvert, document, collectionName).flatMap(it -> { + + return doUpdate(collectionName, query, mapped.updateWithoutId(), it.getClass(), false, false) + .flatMap(result -> { + maybeEmitEvent(new AfterSaveEvent(it, document, collectionName)); + return maybeCallAfterSave(it, document, collectionName); + }); + }); + }); }); } + @SuppressWarnings("NullAway") protected Mono doSave(String collectionName, T objectToSave, MongoWriter writer) { assertUpdateableIdIfNotSet(objectToSave); @@ -1424,27 +1577,36 @@ protected Mono doSave(String collectionName, T objectToSave, MongoWriter< T toSave = maybeEmitEvent(new BeforeConvertEvent(objectToSave, collectionName)).getSource(); - AdaptibleEntity entity = operations.forEntity(toSave, mongoConverter.getConversionService()); - Document dbDoc = entity.toMappedDocument(writer).getDocument(); - maybeEmitEvent(new BeforeSaveEvent(toSave, dbDoc, collectionName)); + return maybeCallBeforeConvert(toSave, collectionName).flatMap(toConvert -> { - return saveDocument(collectionName, dbDoc, toSave.getClass()).map(id -> { + AdaptibleEntity entity = operations.forEntity(toConvert, mongoConverter.getConversionService()); + Document dbDoc = entity.toMappedDocument(writer).getDocument(); + maybeEmitEvent(new BeforeSaveEvent(toConvert, dbDoc, collectionName)); - T saved = entity.populateIdIfNecessary(id); - return maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)).getSource(); + return maybeCallBeforeSave(toConvert, dbDoc, collectionName).flatMap(it -> { + + return saveDocument(collectionName, dbDoc, it.getClass()).flatMap(id -> { + + T saved = entity.populateIdIfNecessary(id); + maybeEmitEvent(new AfterSaveEvent<>(saved, dbDoc, collectionName)); + return maybeCallAfterSave(saved, dbDoc, collectionName); + }); + }); }); }); } - protected Mono insertDBObject(final String collectionName, final Document dbDoc, final Class entityClass) { + protected 
Mono insertDocument(String collectionName, Document dbDoc, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName); + LOGGER.debug(String + .format("Inserting Document containing fields: " + dbDoc.keySet() + " in collection: " + collectionName)); } - Document document = new Document(dbDoc); + MappedDocument document = MappedDocument.of(dbDoc); + queryOperations.createInsertContext(document).prepareId(entityClass); - Flux execute = execute(collectionName, collection -> { + Flux execute = execute(collectionName, collection -> { MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.INSERT, collectionName, entityClass, dbDoc, null); @@ -1452,23 +1614,23 @@ protected Mono insertDBObject(final String collectionName, final Documen MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); - return collectionToUse.insertOne(document); + return collectionToUse.insertOne(document.getDocument()); }); - return Flux.from(execute).last().map(success -> MappedDocument.of(document).getId()); + return Flux.from(execute).last().map(success -> document.getId()); } - protected Flux insertDocumentList(final String collectionName, final List dbDocList) { + protected Flux insertDocumentList(String collectionName, List dbDocList) { if (dbDocList.isEmpty()) { return Flux.empty(); } if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Inserting list of DBObjects containing " + dbDocList.size() + " items"); + LOGGER.debug(String.format("Inserting list of Documents containing %d items", dbDocList.size())); } - final List documents = new ArrayList<>(); + List documents = new ArrayList<>(dbDocList.size()); return execute(collectionName, collection -> { @@ -1481,7 +1643,7 @@ protected Flux insertDocumentList(final String collectionName, final L return collectionToUse.insertMany(documents); - }).flatMap(s -> { + }).flatMapSequential(s -> { return 
Flux.fromStream(documents.stream() // .map(MappedDocument::of) // @@ -1500,11 +1662,11 @@ private MongoCollection prepareCollection(MongoCollection co return collectionToUse; } - protected Mono saveDocument(final String collectionName, final Document document, - final Class entityClass) { + @SuppressWarnings("NullAway") + protected Mono saveDocument(String collectionName, Document document, Class entityClass) { if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Saving Document containing fields: " + document.keySet()); + LOGGER.debug(String.format("Saving Document containing fields: %s", document.keySet())); } return createMono(collectionName, collection -> { @@ -1518,138 +1680,177 @@ protected Mono saveDocument(final String collectionName, final Document ? collection // : collection.withWriteConcern(writeConcernToUse); - Publisher publisher = !mapped.hasId() // - ? collectionToUse.insertOne(document) // - : collectionToUse.replaceOne(mapped.getIdFilter(), document, new ReplaceOptions().upsert(true)); + Publisher publisher; + if (!mapped.hasId()) { + publisher = collectionToUse + .insertOne(queryOperations.createInsertContext(mapped).prepareId(entityClass).getDocument()); + } else { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.replaceSingleContext(mapped, true); + Document filter = updateContext.getReplacementQuery(); + Document replacement = updateContext.getMappedUpdate(entity); + + Mono deferredFilter; + + if (updateContext.requiresShardKey(filter, entity)) { + if (entity.getShardKey().isImmutable()) { + deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null)); + } else { + deferredFilter = Mono + .from( + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()) + .defaultIfEmpty(replacement).map(it -> updateContext.applyShardKey(entity, filter, it)); + } + } else { + deferredFilter = Mono.just(filter); + } + + 
publisher = deferredFilter.flatMapMany( + it -> collectionToUse.replaceOne(it, replacement, updateContext.getReplaceOptions(entityClass))); + } return Mono.from(publisher).map(o -> mapped.getId()); }); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) - */ - public Mono upsert(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, true, false); + @Override + public Mono upsert(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) - */ - public Mono upsert(Query query, Update update, String collectionName) { + @Override + public Mono upsert(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#upsert(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono upsert(Query query, Update update, Class entityClass, String collectionName) { + @Override + public Mono upsert(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return doUpdate(collectionName, query, update, entityClass, true, false); } /* * (non-Javadoc)) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, 
java.lang.Class) + * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.UpdateDefinition, java.lang.Class) */ - public Mono updateFirst(Query query, Update update, Class entityClass) { - return doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, false); + @Override + public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) - */ - public Mono updateFirst(final Query query, final Update update, final String collectionName) { + @Override + public Mono updateFirst(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateFirst(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono updateFirst(Query query, Update update, Class entityClass, String collectionName) { + @Override + public Mono updateFirst(Query query, UpdateDefinition update, Class entityClass, + String collectionName) { return doUpdate(collectionName, query, update, entityClass, false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class) - */ - public Mono updateMulti(Query query, Update update, Class entityClass) { - return 
doUpdate(determineCollectionName(entityClass), query, update, entityClass, false, true); + @Override + public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass) { + return doUpdate(getCollectionName(entityClass), query, update, entityClass, false, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.String) - */ - public Mono updateMulti(final Query query, final Update update, String collectionName) { + @Override + public Mono updateMulti(Query query, UpdateDefinition update, String collectionName) { return doUpdate(collectionName, query, update, null, false, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#updateMulti(org.springframework.data.mongodb.core.query.Query, org.springframework.data.mongodb.core.query.Update, java.lang.Class, java.lang.String) - */ - public Mono updateMulti(final Query query, final Update update, Class entityClass, + @Override + public Mono updateMulti(Query query, UpdateDefinition update, Class entityClass, String collectionName) { return doUpdate(collectionName, query, update, entityClass, false, true); } - protected Mono doUpdate(final String collectionName, Query query, @Nullable Update update, - @Nullable Class entityClass, final boolean upsert, final boolean multi) { + @SuppressWarnings("NullAway") + protected Mono doUpdate(String collectionName, Query query, UpdateDefinition update, + @Nullable Class entityClass, boolean upsert, boolean multi) { MongoPersistentEntity entity = entityClass == null ? null : getPersistentEntity(entityClass); - Flux result = execute(collectionName, collection -> { + UpdateContext updateContext = multi ? 
queryOperations.updateContext(update, query, upsert) + : queryOperations.updateSingleContext(update, query, upsert); + updateContext.increaseVersionForUpdateIfNecessary(entity); - increaseVersionForUpdateIfNecessary(entity, update); + Document queryObj = updateContext.getMappedQuery(entity); + UpdateOptions updateOptions = updateContext.getUpdateOptions(entityClass, query); - Document queryObj = queryMapper.getMappedObject(query.getQueryObject(), entity); - Document updateObj = update == null ? new Document() - : updateMapper.getMappedObject(update.getUpdateObject(), entity); + Flux result; - if (LOGGER.isDebugEnabled()) { - LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", - serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); - } + if (updateContext.isAggregationUpdate()) { + + List pipeline = updateContext.getUpdatePipeline(entityClass); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, + update.getUpdateObject(), queryObj); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + + result = execute(collectionName, collection -> { + + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(pipeline), collectionName)); + } + + collection = writeConcernToUse != null ? collection.withWriteConcern(writeConcernToUse) : collection; + return multi ? 
collection.updateMany(queryObj, pipeline, updateOptions) + : collection.updateOne(queryObj, pipeline, updateOptions); + }); + } else { + + Document updateObj = updateContext.getMappedUpdate(entity); MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.UPDATE, collectionName, entityClass, updateObj, queryObj); WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); - UpdateOptions updateOptions = new UpdateOptions().upsert(upsert); - query.getCollation().map(Collation::toMongoCollation).ifPresent(updateOptions::collation); + result = execute(collectionName, collection -> { - if (!UpdateMapper.isUpdateObject(updateObj)) { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("Calling update using query: %s and update: %s in collection: %s", + serializeToJsonSafely(queryObj), serializeToJsonSafely(updateObj), collectionName)); + } - ReplaceOptions replaceOptions = new ReplaceOptions(); - replaceOptions.upsert(updateOptions.isUpsert()); - replaceOptions.collation(updateOptions.getCollation()); + MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); - return collectionToUse.replaceOne(queryObj, updateObj, replaceOptions); - } - if (multi) { - return collectionToUse.updateMany(queryObj, updateObj, updateOptions); - } - return collectionToUse.updateOne(queryObj, updateObj, updateOptions); - }).doOnNext(updateResult -> { + if (!UpdateMapper.isUpdateObject(updateObj)) { + + Document filter = new Document(queryObj); + Mono deferredFilter; + + if (updateContext.requiresShardKey(filter, entity)) { + if (entity.getShardKey().isImmutable()) { + deferredFilter = Mono.just(updateContext.applyShardKey(entity, filter, null)); + } else { + deferredFilter = Mono.from( + collection.find(filter, Document.class).projection(updateContext.getMappedShardKey(entity)).first()) + .defaultIfEmpty(updateObj).map(it -> 
updateContext.applyShardKey(entity, filter, it)); + } + } else { + deferredFilter = Mono.just(filter); + } + + com.mongodb.client.model.ReplaceOptions replaceOptions = updateContext.getReplaceOptions(entityClass); + return deferredFilter.flatMap(it -> Mono.from(collectionToUse.replaceOne(it, updateObj, replaceOptions))); + } + + return multi ? collectionToUse.updateMany(queryObj, updateObj, updateOptions) + : collectionToUse.updateOne(queryObj, updateObj, updateOptions); + }); + } + + result = result.doOnNext(updateResult -> { if (entity != null && entity.hasVersionProperty() && !multi) { if (updateResult.wasAcknowledged() && updateResult.getMatchedCount() == 0) { - Document queryObj = query == null ? new Document() - : queryMapper.getMappedObject(query.getQueryObject(), entity); - Document updateObj = update == null ? new Document() - : updateMapper.getMappedObject(update.getUpdateObject(), entity); - if (dbObjectContainsVersionProperty(queryObj, entity)) - throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity: " - + updateObj.toString() + " to collection " + collectionName); + Document updateObj = updateContext.getMappedUpdate(entity); + if (containsVersionProperty(queryObj, entity)) + throw new OptimisticLockingFailureException("Optimistic lock exception on saving entity %s to collection %s" + .formatted(entity.getName(), collectionName)); } } }); @@ -1657,65 +1858,40 @@ protected Mono doUpdate(final String collectionName, Query query, return result.next(); } - private void increaseVersionForUpdateIfNecessary(@Nullable MongoPersistentEntity persistentEntity, Update update) { - - if (persistentEntity != null && persistentEntity.hasVersionProperty()) { - String versionFieldName = persistentEntity.getRequiredVersionProperty().getFieldName(); - if (!update.modifies(versionFieldName)) { - update.inc(versionFieldName, 1L); - } - } - } - - private boolean dbObjectContainsVersionProperty(Document document, - @Nullable 
MongoPersistentEntity persistentEntity) { + private boolean containsVersionProperty(Document document, @Nullable MongoPersistentEntity persistentEntity) { if (persistentEntity == null || !persistentEntity.hasVersionProperty()) { return false; } - return document.containsKey(persistentEntity.getRequiredIdProperty().getFieldName()); + return document.containsKey(persistentEntity.getRequiredVersionProperty().getFieldName()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono) - */ @Override public Mono remove(Mono objectToRemove) { return objectToRemove.flatMap(this::remove); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(reactor.core.publisher.Mono, java.lang.String) - */ @Override public Mono remove(Mono objectToRemove, String collectionName) { return objectToRemove.flatMap(it -> remove(it, collectionName)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object) - */ + @Override public Mono remove(Object object) { - Assert.notNull(object, "Object must not be null!"); + Assert.notNull(object, "Object must not be null"); - return remove(operations.forEntity(object).getByIdQuery(), object.getClass()); + return remove(operations.forEntity(object).getRemoveByQuery(), object.getClass()); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(java.lang.Object, java.lang.String) - */ + @Override public Mono remove(Object object, String collectionName) { - Assert.notNull(object, "Object must not be null!"); - Assert.hasText(collectionName, "Collection name must not be null or empty!"); + Assert.notNull(object, "Object must not be null"); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - return doRemove(collectionName, operations.forEntity(object).getByIdQuery(), object.getClass()); + return 
doRemove(collectionName, operations.forEntity(object).getRemoveByQuery(), object.getClass()); } private void assertUpdateableIdIfNotSet(Object value) { @@ -1733,32 +1909,23 @@ private void assertUpdateableIdIfNotSet(Object value) { if (!MongoSimpleTypes.AUTOGENERATED_ID_TYPES.contains(property.getType())) { throw new InvalidDataAccessApiUsageException( - String.format("Cannot autogenerate id of type %s for entity of type %s!", property.getType().getName(), + String.format("Cannot autogenerate id of type %s for entity of type %s", property.getType().getName(), value.getClass().getName())); } } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ + @Override public Mono remove(Query query, String collectionName) { return remove(query, null, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override public Mono remove(Query query, Class entityClass) { - return remove(query, entityClass, determineCollectionName(entityClass)); + return remove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#remove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ + @Override public Mono remove(Query query, @Nullable Class entityClass, String collectionName) { return doRemove(collectionName, query, entityClass); } @@ -1766,38 +1933,38 @@ public Mono remove(Query query, @Nullable Class entityClass, St protected Mono doRemove(String collectionName, Query query, @Nullable Class entityClass) { if (query == null) { - throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be null!"); + throw new InvalidDataAccessApiUsageException("Query passed in to remove can't be 
null"); } - Assert.hasText(collectionName, "Collection name must not be null or empty!"); - - final Document queryObject = query.getQueryObject(); - final MongoPersistentEntity entity = getPersistentEntity(entityClass); + Assert.hasText(collectionName, "Collection name must not be null or empty"); - return execute(collectionName, collection -> { - - Document removeQuey = queryMapper.getMappedObject(queryObject, entity); + MongoPersistentEntity entity = getPersistentEntity(entityClass); - maybeEmitEvent(new BeforeDeleteEvent<>(removeQuey, entityClass, collectionName)); + DeleteContext deleteContext = queryOperations.deleteQueryContext(query); + Document queryObject = deleteContext.getMappedQuery(entity); + DeleteOptions deleteOptions = deleteContext.getDeleteOptions(entityClass); + Document removeQuery = deleteContext.getMappedQuery(entity); + MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, + null, removeQuery); + WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); - MongoAction mongoAction = new MongoAction(writeConcern, MongoActionOperation.REMOVE, collectionName, entityClass, - null, removeQuey); + return execute(collectionName, collection -> { - final DeleteOptions deleteOptions = new DeleteOptions(); - query.getCollation().map(Collation::toMongoCollation).ifPresent(deleteOptions::collation); + maybeEmitEvent(new BeforeDeleteEvent<>(removeQuery, entityClass, collectionName)); - WriteConcern writeConcernToUse = prepareWriteConcern(mongoAction); - MongoCollection collectionToUse = prepareCollection(collection, writeConcernToUse); + MongoCollection collectionToUse = collectionPreparer + .prepare(prepareCollection(collection, writeConcernToUse)); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Remove using query: {} in collection: {}.", - new Object[] { 
serializeToJsonSafely(removeQuey), collectionName }); + LOGGER.debug(String.format("Remove using query: %s in collection: %s.", serializeToJsonSafely(removeQuery), + collectionName)); } if (query.getLimit() > 0 || query.getSkip() > 0) { FindPublisher cursor = new QueryFindPublisherPreparer(query, entityClass) - .prepare(collection.find(removeQuey)) // + .prepare(collection.find(removeQuery)) // .projection(MappedDocument.getIdOnlyProjection()); return Flux.from(cursor) // @@ -1808,85 +1975,92 @@ protected Mono doRemove(String collectionName, Query query, @N return collectionToUse.deleteMany(MappedDocument.getIdIn(val), deleteOptions); }); } else { - return collectionToUse.deleteMany(removeQuey, deleteOptions); + return collectionToUse.deleteMany(removeQuery, deleteOptions); } - }).doOnNext(deleteResult -> maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName))) + }).doOnNext(it -> maybeEmitEvent(new AfterDeleteEvent<>(queryObject, entityClass, collectionName))) // .next(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class) - */ + @Override public Flux findAll(Class entityClass) { - return findAll(entityClass, determineCollectionName(entityClass)); + return findAll(entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAll(java.lang.Class, java.lang.String) - */ + @Override public Flux findAll(Class entityClass, String collectionName) { - return executeFindMultiInternal(new FindCallback(null), null, - new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), collectionName); + return executeFindMultiInternal(new FindCallback(CollectionPreparer.identity(), null), + FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } - /* - * (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.String) - */ @Override @SuppressWarnings("unchecked") public Flux findAllAndRemove(Query query, String collectionName) { return (Flux) findAllAndRemove(query, Object.class, collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ @Override public Flux findAllAndRemove(Query query, Class entityClass) { - return findAllAndRemove(query, entityClass, determineCollectionName(entityClass)); + return findAllAndRemove(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#findAllAndRemove(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public Flux findAllAndRemove(Query query, Class entityClass, String collectionName) { return doFindAndDelete(collectionName, query, entityClass); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(org.springframework.data.mongodb.core.query.Query, java.lang.Class) - */ + @Override + public Mono replace(Query query, T replacement, ReplaceOptions options, String collectionName) { + + Assert.notNull(replacement, "Replacement must not be null"); + return replace(query, (Class) ClassUtils.getUserClass(replacement), replacement, options, collectionName); + } + + protected Mono replace(Query query, Class entityType, T replacement, ReplaceOptions options, + String collectionName) { + + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityType); + UpdateContext updateContext = queryOperations.replaceSingleContext(query, + operations.forEntity(replacement).toMappedDocument(this.mongoConverter), options.isUpsert()); + + return createMono(collectionName, 
collection -> { + + Document mappedUpdate = updateContext.getMappedUpdate(entity); + + MongoAction action = new MongoAction(writeConcern, MongoActionOperation.REPLACE, collectionName, entityType, + mappedUpdate, updateContext.getQueryObject()); + + MongoCollection collectionToUse = createCollectionPreparer(query, action).prepare(collection); + + return collectionToUse.replaceOne(updateContext.getMappedQuery(entity), mappedUpdate, + updateContext.getReplaceOptions(entityType, it -> { + it.upsert(options.isUpsert()); + })); + }); + } + @Override public Flux tail(Query query, Class entityClass) { - return tail(query, entityClass, determineCollectionName(entityClass)); + return tail(query, entityClass, getCollectionName(entityClass)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#tail(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override public Flux tail(@Nullable Query query, Class entityClass, String collectionName) { if (query == null) { - // TODO: clean up - LOGGER.debug(String.format("find for class: %s in collection: %s", entityClass, collectionName)); + LOGGER.debug(String.format("Tail for class: %s in collection: %s", entityClass, collectionName)); return executeFindMultiInternal( - collection -> new FindCallback(null).doInCollection(collection).cursorType(CursorType.TailableAwait), null, - new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), collectionName); + collection -> new FindCallback(CollectionPreparer.identity(), null).doInCollection(collection) + .cursorType(CursorType.TailableAwait), + FindPublisherPreparer.NO_OP_PREPARER, new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName), + collectionName); } - return doFind(collectionName, query.getQueryObject(), query.getFieldsObject(), entityClass, + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(query); + return 
doFind(collectionName, collectionPreparer, query.getQueryObject(), query.getFieldsObject(), entityClass, new TailingQueryFindPublisherPreparer(query, entityClass)); } @@ -1898,37 +2072,46 @@ public Flux> changeStream(@Nullable String database, @N FullDocument fullDocument = ClassUtils.isAssignable(Document.class, targetType) ? FullDocument.DEFAULT : FullDocument.UPDATE_LOOKUP; - MongoDatabase db = StringUtils.hasText(database) ? mongoDatabaseFactory.getMongoDatabase(database) - : getMongoDatabase(); - - ChangeStreamPublisher publisher; - if (StringUtils.hasText(collectionName)) { - publisher = filter.isEmpty() ? db.getCollection(collectionName).watch(Document.class) - : db.getCollection(collectionName).watch(filter, Document.class); + return ReactiveMongoDatabaseUtils.getDatabase(database, mongoDatabaseFactory) // + .map(db -> { + ChangeStreamPublisher publisher; + if (StringUtils.hasText(collectionName)) { + publisher = filter.isEmpty() ? db.getCollection(collectionName).watch(Document.class) + : db.getCollection(collectionName).watch(filter, Document.class); - } else { - publisher = filter.isEmpty() ? db.watch(Document.class) : db.watch(filter, Document.class); - } + } else { + publisher = filter.isEmpty() ? 
db.watch(Document.class) : db.watch(filter, Document.class); + } - publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter).orElse(publisher); - publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); - publisher = options.getResumeTimestamp().map(it -> new BsonTimestamp(it.toEpochMilli())) - .map(publisher::startAtOperationTime).orElse(publisher); - publisher = publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument)); + if (options.isResumeAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::resumeAfter) + .orElse(publisher); + } else if (options.isStartAfter()) { + publisher = options.getResumeToken().map(BsonValue::asDocument).map(publisher::startAfter) + .orElse(publisher); + } + publisher = options.getCollation().map(Collation::toMongoCollation).map(publisher::collation) + .orElse(publisher); + publisher = options.getResumeBsonTimestamp().map(publisher::startAtOperationTime).orElse(publisher); - return Flux.from(publisher).map(document -> new ChangeStreamEvent<>(document, targetType, getConverter())); + if (options.getFullDocumentBeforeChangeLookup().isPresent()) { + publisher = publisher.fullDocumentBeforeChange(options.getFullDocumentBeforeChangeLookup().get()); + } + return publisher.fullDocument(options.getFullDocumentLookup().orElse(fullDocument)); + }) // + .flatMapMany(publisher -> Flux.from(publisher) + .map(document -> new ChangeStreamEvent<>(document, targetType, getConverter()))); } List prepareFilter(ChangeStreamOptions options) { Object filter = options.getFilter().orElse(Collections.emptyList()); - if (filter instanceof Aggregation) { - Aggregation agg = (Aggregation) filter; - AggregationOperationContext context = agg instanceof TypedAggregation - ? 
new TypeBasedAggregationOperationContext(((TypedAggregation) agg).getInputType(), + if (filter instanceof Aggregation agg) { + AggregationOperationContext context = agg instanceof TypedAggregation typedAggregation + ? new TypeBasedAggregationOperationContext(typedAggregation.getInputType(), getConverter().getMappingContext(), queryMapper) - : Aggregation.DEFAULT_CONTEXT; + : new RelaxedTypeBasedAggregationOperationContext(Object.class, mappingContext, queryMapper); return agg.toPipeline(new PrefixingDelegatingAggregationOperationContext(context, "fullDocument", Arrays.asList("operationType", "fullDocument", "documentKey", "updateDescription", "ns"))); @@ -1942,57 +2125,54 @@ List prepareFilter(ChangeStreamOptions options) { "ChangeStreamRequestOptions.filter mut be either an Aggregation or a plain list of Documents"); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#mapReduce(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.Class, java.lang.String, java.lang.String, org.springframework.data.mongodb.core.mapreduce.MapReduceOptions) - */ + @Override public Flux mapReduce(Query filterQuery, Class domainType, Class resultType, String mapFunction, String reduceFunction, MapReduceOptions options) { - return mapReduce(filterQuery, domainType, determineCollectionName(domainType), resultType, mapFunction, - reduceFunction, options); + return mapReduce(filterQuery, domainType, getCollectionName(domainType), resultType, mapFunction, reduceFunction, + options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoOperations#mapReduce(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String, java.lang.Class, java.lang.String, java.lang.String, org.springframework.data.mongodb.core.mapreduce.MapReduceOptions) - */ + @Override public Flux mapReduce(Query filterQuery, Class domainType, String inputCollectionName, Class resultType, String 
mapFunction, String reduceFunction, MapReduceOptions options) { - Assert.notNull(filterQuery, "Filter query must not be null!"); - Assert.notNull(domainType, "Domain type must not be null!"); - Assert.hasText(inputCollectionName, "Input collection name must not be null or empty!"); - Assert.notNull(resultType, "Result type must not be null!"); - Assert.notNull(mapFunction, "Map function must not be null!"); - Assert.notNull(reduceFunction, "Reduce function must not be null!"); - Assert.notNull(options, "MapReduceOptions must not be null!"); + Assert.notNull(filterQuery, "Filter query must not be null"); + Assert.notNull(domainType, "Domain type must not be null"); + Assert.hasText(inputCollectionName, "Input collection name must not be null or empty"); + Assert.notNull(resultType, "Result type must not be null"); + Assert.notNull(mapFunction, "Map function must not be null"); + Assert.notNull(reduceFunction, "Reduce function must not be null"); + Assert.notNull(options, "MapReduceOptions must not be null"); assertLocalFunctionNames(mapFunction, reduceFunction); + ReactiveCollectionPreparerDelegate collectionPreparer = ReactiveCollectionPreparerDelegate.of(filterQuery); return createFlux(inputCollectionName, collection -> { Document mappedQuery = queryMapper.getMappedObject(filterQuery.getQueryObject(), mappingContext.getPersistentEntity(domainType)); - MapReducePublisher publisher = collection.mapReduce(mapFunction, reduceFunction, Document.class); - - if (StringUtils.hasText(options.getOutputCollection())) { - publisher = publisher.collectionName(options.getOutputCollection()); - } + MapReducePublisher publisher = collectionPreparer.prepare(collection).mapReduce(mapFunction, + reduceFunction, Document.class); publisher.filter(mappedQuery); - publisher.sort(getMappedSortObject(filterQuery, domainType)); - if (filterQuery.getMeta().getMaxTimeMsec() != null) { - publisher.maxTime(filterQuery.getMeta().getMaxTimeMsec(), TimeUnit.MILLISECONDS); + Document mappedSort = 
getMappedSortObject(filterQuery, domainType); + if (mappedSort != null && !mappedSort.isEmpty()) { + publisher.sort(mappedSort); + } + + Meta meta = filterQuery.getMeta(); + if (meta.hasMaxTime()) { + publisher.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); } if (filterQuery.getLimit() > 0 || (options.getLimit() != null)) { if (filterQuery.getLimit() > 0 && (options.getLimit() != null)) { throw new IllegalArgumentException( - "Both Query and MapReduceOptions define a limit. Please provide the limit only via one of the two."); + "Both Query and MapReduceOptions define a limit; Please provide the limit only via one of the two."); } if (filterQuery.getLimit() > 0) { @@ -2008,7 +2188,7 @@ public Flux mapReduce(Query filterQuery, Class domainType, String inpu Optionals.ifAllPresent(filterQuery.getCollation(), options.getCollation(), (l, r) -> { throw new IllegalArgumentException( - "Both Query and MapReduceOptions define a collation. Please provide the collation only via one of the two."); + "Both Query and MapReduceOptions define a collation; Please provide the collation only via one of the two."); }); if (options.getCollation().isPresent()) { @@ -2018,23 +2198,31 @@ public Flux mapReduce(Query filterQuery, Class domainType, String inpu if (!CollectionUtils.isEmpty(options.getScopeVariables())) { publisher = publisher.scope(new Document(options.getScopeVariables())); } + if (options.getLimit() != null && options.getLimit() > 0) { publisher = publisher.limit(options.getLimit()); } + if (options.getFinalizeFunction().filter(StringUtils::hasText).isPresent()) { publisher = publisher.finalizeFunction(options.getFinalizeFunction().get()); } + if (options.getJavaScriptMode() != null) { publisher = publisher.jsMode(options.getJavaScriptMode()); } - if (options.getOutputSharded().isPresent()) { - publisher = publisher.sharded(options.getOutputSharded().get()); + + if (StringUtils.hasText(options.getOutputCollection()) && !options.usesInlineOutput()) { + 
publisher = publisher.collectionName(options.getOutputCollection()).action(options.getMapReduceAction()); + + if (options.getOutputDatabase().isPresent()) { + publisher = publisher.databaseName(options.getOutputDatabase().get()); + } } publisher = collation.map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); return Flux.from(publisher) - .map(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith); + .flatMapSequential(new ReadDocumentCallback<>(mongoConverter, resultType, inputCollectionName)::doWith); }); } @@ -2045,66 +2233,47 @@ private static void assertLocalFunctionNames(String... functions) { if (ResourceUtils.isUrl(function)) { throw new IllegalArgumentException(String.format( - "Blocking accessing to resource %s is not allowed using reactive infrastructure. You may load the resource at startup and cache its value.", + "Blocking accessing to resource %s is not allowed using reactive infrastructure; You may load the resource at startup and cache its value.", function)); } } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveFindOperation#query(java.lang.Class) - */ @Override public ReactiveFind query(Class domainType) { return new ReactiveFindOperationSupport(this).query(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation#update(java.lang.Class) - */ @Override public ReactiveUpdate update(Class domainType) { return new ReactiveUpdateOperationSupport(this).update(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation#remove(java.lang.Class) - */ @Override public ReactiveRemove remove(Class domainType) { return new ReactiveRemoveOperationSupport(this).remove(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveInsertOperation#insert(java.lang.Class) - */ @Override public ReactiveInsert insert(Class domainType) { return new 
ReactiveInsertOperationSupport(this).insert(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveAggregationOperation#aggregateAndReturn(java.lang.Class) - */ @Override public ReactiveAggregation aggregateAndReturn(Class domainType) { return new ReactiveAggregationOperationSupport(this).aggregateAndReturn(domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMapReduceOperation#mapReduce(java.lang.Class) - */ @Override public ReactiveMapReduce mapReduce(Class domainType) { return new ReactiveMapReduceOperationSupport(this).mapReduce(domainType); } + @Override + public ReactiveChangeStream changeStream(Class domainType) { + return new ReactiveChangeStreamOperationSupport(this).changeStream(domainType); + } + /** * Retrieve and remove all documents matching the given {@code query} by calling {@link #find(Query, Class, String)} * and {@link #remove(Query, Class, String)}, whereas the {@link Query} for {@link #remove(Query, Class, String)} is @@ -2119,9 +2288,47 @@ protected Flux doFindAndDelete(String collectionName, Query query, Class< Flux flux = find(query, entityClass, collectionName); - return Flux.from(flux).collectList() + return Flux.from(flux).collectList().filter(it -> !it.isEmpty()) .flatMapMany(list -> Flux.from(remove(operations.getByIdInQuery(list), entityClass, collectionName)) - .flatMap(deleteResult -> Flux.fromIterable(list))); + .flatMapSequential(deleteResult -> Flux.fromIterable(list))); + } + + @SuppressWarnings({"rawtypes", "unchecked", "NullAway"}) + Flux doFindAndDelete(String collectionName, Query query, Class entityClass, + QueryResultConverter resultConverter) { + + List ids = new ArrayList<>(); + ProjectingReadCallback readCallback = new ProjectingReadCallback(getConverter(), + EntityProjection.nonProjecting(entityClass), collectionName); + + QueryResultConverterCallback callback = new QueryResultConverterCallback<>(resultConverter, readCallback) { + + @Override 
+ public Mono doWith(Document object) { + ids.add(object.get("_id")); + return super.doWith(object); + } + }; + + Flux flux = doFind(collectionName, ReactiveCollectionPreparerDelegate.of(query), query.getQueryObject(), + query.getFieldsObject(), entityClass, + new QueryFindPublisherPreparer(query, query.getSortObject(), query.getLimit(), query.getSkip(), entityClass), + callback); + + return Flux.from(flux).collectList().filter(it -> !it.isEmpty()).flatMapMany(list -> { + + Criteria[] criterias = ids.stream() // + .map(it -> Criteria.where("_id").is(it)) // + .toArray(Criteria[]::new); + + Query removeQuery = new Query(criterias.length == 1 ? criterias[0] : new Criteria().orOperator(criterias)); + if (query.hasReadPreference()) { + removeQuery.withReadPreference(query.getReadPreference()); + } + + return Flux.from(remove(removeQuery, entityClass, collectionName)) + .flatMapSequential(deleteResult -> Flux.fromIterable(list)); + }); } /** @@ -2131,17 +2338,17 @@ protected Flux doFindAndDelete(String collectionName, Query query, Class< * @param collectionOptions * @return the collection that was created */ - protected Mono> doCreateCollection(final String collectionName, - final CreateCollectionOptions collectionOptions) { + protected Mono> doCreateCollection(String collectionName, + CreateCollectionOptions collectionOptions) { - return createMono(db -> db.createCollection(collectionName, collectionOptions)).map(success -> { + return createMono(db -> db.createCollection(collectionName, collectionOptions)).doOnSuccess(it -> { // TODO: Emit a collection created event if (LOGGER.isDebugEnabled()) { - LOGGER.debug("Created collection [{}]", collectionName); + LOGGER.debug(String.format("Created collection [%s]", collectionName)); } - return getCollection(collectionName); - }); + + }).then(getCollection(collectionName)); } /** @@ -2149,25 +2356,51 @@ protected Mono> doCreateCollection(final String collec * The query document is specified as a standard {@link Document} and 
so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. * @param collation can be {@literal null}. * @return the {@link List} of converted objects. */ - protected Mono doFindOne(String collectionName, Document query, @Nullable Document fields, + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, Class entityClass, @Nullable Collation collation) { + return doFindOne(collectionName, collectionPreparer, query, fields, entityClass, + findPublisher -> collation != null ? findPublisher.collation(collation.toMongoCollation()) : findPublisher); + } + + /** + * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. + * The query document is specified as a standard {@link Document} and so is the fields specification. + * + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param fields the document that specifies the fields to be returned. + * @param entityClass the parameterized type of the returned list. + * @param preparer the preparer modifying collection and publisher to fit the needs. + * @return the {@link List} of converted objects. 
+ * @since 2.2 + */ + protected Mono doFindOne(String collectionName, + CollectionPreparer> collectionPreparer, Document query, @Nullable Document fields, + Class entityClass, FindPublisherPreparer preparer) { + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedFields = fields == null ? null : queryMapper.getMappedObject(fields, entity); + + QueryContext queryContext = queryOperations + .createQueryContext(new BasicQuery(query, fields != null ? fields : new Document())); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("findOne using query: %s fields: %s for class: %s in collection: %s", serializeToJsonSafely(query), mappedFields, entityClass, collectionName)); } - return executeFindOneInternal(new FindOneCallback(mappedQuery, mappedFields, collation), + return executeFindOneInternal(new FindOneCallback(collectionPreparer, mappedQuery, mappedFields, preparer), new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } @@ -2176,13 +2409,15 @@ protected Mono doFindOne(String collectionName, Document query, @Nullable * query document is specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record * @param fields the document that specifies the fields to be returned * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. 
*/ - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass) { - return doFind(collectionName, query, fields, entityClass, null, + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, null, new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName)); } @@ -2192,85 +2427,78 @@ protected Flux doFind(String collectionName, Document query, Document fie * specified as a standard Document and so is the fields specification. * * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param query the query document that specifies the criteria used to find a record. * @param fields the document that specifies the fields to be returned. * @param entityClass the parameterized type of the returned list. - * @param preparer allows for customization of the {@link DBCursor} used when iterating over the result set, (apply - * limits, skips and so on). + * @param preparer allows for customization of the {@link com.mongodb.client.FindIterable} used when iterating over + * the result set, (apply limits, skips and so on). * @return the {@link List} of converted objects. 
*/ - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, - FindPublisherPreparer preparer) { - return doFind(collectionName, query, fields, entityClass, preparer, - new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); - } - - protected Flux doFind(String collectionName, Document query, Document fields, Class entityClass, - @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback) { - - MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - - Document mappedFields = queryMapper.getMappedFields(fields, entity); - Document mappedQuery = queryMapper.getMappedObject(query, entity); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", - serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName)); - } - - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, objectCallback, - collectionName); + protected Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class entityClass, FindPublisherPreparer preparer) { + return doFind(collectionName, collectionPreparer, query, fields, entityClass, preparer, + new ReadDocumentCallback<>(mongoConverter, entityClass, collectionName)); } - /** - * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified targetClass while - * using sourceClass for mapping the query. 
- * - * @since 2.0 - */ - Flux doFind(String collectionName, Document query, Document fields, Class sourceClass, - Class targetClass, FindPublisherPreparer preparer) { + protected Flux doFind(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + Class entityClass, @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback) { - MongoPersistentEntity entity = mappingContext.getRequiredPersistentEntity(sourceClass); + MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - Document mappedFields = getMappedFieldsObject(fields, entity, targetClass); - Document mappedQuery = queryMapper.getMappedObject(query, entity); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, EntityProjection.nonProjecting(entityClass)); + Document mappedQuery = queryContext.getMappedQuery(entity); if (LOGGER.isDebugEnabled()) { - LOGGER.debug("find using query: {} fields: {} for class: {} in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName); + LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, entityClass, collectionName)); } - return executeFindMultiInternal(new FindCallback(mappedQuery, mappedFields), preparer, - new ProjectingReadCallback<>(mongoConverter, sourceClass, targetClass, collectionName), collectionName); + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), + preparer != null ? 
preparer : FindPublisherPreparer.NO_OP_PREPARER, objectCallback, collectionName); + } + + CollectionPreparer> createCollectionPreparer(Query query) { + return ReactiveCollectionPreparerDelegate.of(query); } - private Document getMappedFieldsObject(Document fields, MongoPersistentEntity entity, Class targetType) { - return queryMapper.getMappedFields(addFieldsForProjection(fields, entity.getType(), targetType), entity); + CollectionPreparer> createCollectionPreparer(Query query, @Nullable MongoAction action) { + CollectionPreparer> collectionPreparer = createCollectionPreparer(query); + if (action == null) { + return collectionPreparer; + } + return collectionPreparer.andThen(collection -> { + WriteConcern writeConcern = prepareWriteConcern(action); + return writeConcern != null ? collection.withWriteConcern(writeConcern) : collection; + }); } /** - * For cases where {@code fields} is {@literal null} or {@literal empty} add fields required for creating the - * projection (target) type if the {@code targetType} is a {@literal closed interface projection}. + * Map the results of an ad-hoc query on the default MongoDB collection to a List of the specified targetClass while + * using sourceClass for mapping the query. * - * @param fields must not be {@literal null}. - * @param domainType must not be {@literal null}. - * @param targetType must not be {@literal null}. - * @return {@link Document} with fields to be included. 
+ * @since 2.0 */ - private Document addFieldsForProjection(Document fields, Class domainType, Class targetType) { + Flux doFind(String collectionName, CollectionPreparer> collectionPreparer, + Document query, Document fields, Class sourceClass, Class targetClass, + QueryResultConverter resultConverter, FindPublisherPreparer preparer) { - if (!fields.isEmpty() || !targetType.isInterface() || ClassUtils.isAssignable(domainType, targetType)) { - return fields; - } + MongoPersistentEntity entity = mappingContext.getPersistentEntity(sourceClass); + EntityProjection projection = operations.introspectProjection(targetClass, sourceClass); - ProjectionInformation projectionInformation = projectionFactory.getProjectionInformation(targetType); + QueryContext queryContext = queryOperations.createQueryContext(new BasicQuery(query, fields)); + Document mappedFields = queryContext.getMappedFields(entity, projection); + Document mappedQuery = queryContext.getMappedQuery(entity); - if (projectionInformation.isClosed()) { - projectionInformation.getInputProperties().forEach(it -> fields.append(it.getName(), 1)); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug(String.format("find using query: %s fields: %s for class: %s in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, sourceClass, collectionName)); } - return fields; + return executeFindMultiInternal(new FindCallback(collectionPreparer, mappedQuery, mappedFields), preparer, + getResultReader(projection, collectionName, resultConverter), collectionName); } protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions) { @@ -2279,92 +2507,66 @@ protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable Col protected CreateCollectionOptions convertToCreateCollectionOptions(@Nullable CollectionOptions collectionOptions, Class entityType) { - - CreateCollectionOptions result = new CreateCollectionOptions(); - - if (collectionOptions == null) { - 
return result; - } - - collectionOptions.getCapped().ifPresent(result::capped); - collectionOptions.getSize().ifPresent(result::sizeInBytes); - collectionOptions.getMaxDocuments().ifPresent(result::maxDocuments); - collectionOptions.getCollation().map(Collation::toMongoCollation).ifPresent(result::collation); - - collectionOptions.getValidationOptions().ifPresent(it -> { - - ValidationOptions validationOptions = new ValidationOptions(); - - it.getValidationAction().ifPresent(validationOptions::validationAction); - it.getValidationLevel().ifPresent(validationOptions::validationLevel); - - it.getValidator().ifPresent(val -> validationOptions.validator(getMappedValidator(val, entityType))); - - result.validationOptions(validationOptions); - }); - - return result; - } - - private Document getMappedValidator(Validator validator, Class domainType) { - - Document validationRules = validator.toDocument(); - - if (validationRules.containsKey("$jsonSchema")) { - return schemaMapper.mapSchema(validationRules, domainType); - } - - return queryMapper.getMappedObject(validationRules, mappingContext.getPersistentEntity(domainType)); + return operations.convertToCreateCollectionOptions(collectionOptions, entityType); } /** * Map the results of an ad-hoc query on the default MongoDB collection to an object using the template's converter. - * The first document that matches the query is returned and also removed from the collection in the database. - *

          + * The first document that matches the query is returned and also removed from the collection in the database.
          * The query document is specified as a standard Document and so is the fields specification. * - * @param collectionName name of the collection to retrieve the objects from - * @param query the query document that specifies the criteria used to find a record - * @param collation collation + * @param collectionName name of the collection to retrieve the objects from. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param query the query document that specifies the criteria used to find a record. + * @param collation collation. * @param entityClass the parameterized type of the returned list. * @return the List of converted objects. */ - protected Mono doFindAndRemove(String collectionName, Document query, Document fields, Document sort, - @Nullable Collation collation, Class entityClass) { + protected Mono doFindAndRemove(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + @Nullable Document sort, @Nullable Collation collation, Class entityClass) { if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format("findAndRemove using query: %s fields: %s sort: %s for class: %s in collection: %s", - serializeToJsonSafely(query), fields, sort, entityClass, collectionName)); + serializeToJsonSafely(query), fields, serializeToJsonSafely(sort), entityClass, collectionName)); } MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); - return executeFindOneInternal( - new FindAndRemoveCallback(queryMapper.getMappedObject(query, entity), fields, sort, collation), + return executeFindOneInternal(new FindAndRemoveCallback(collectionPreparer, + queryMapper.getMappedObject(query, entity), fields, sort, collation), new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); } - protected Mono doFindAndModify(String collectionName, Document query, Document fields, Document sort, - Class entityClass, Update update, 
FindAndModifyOptions options) { + Mono doFindAndModify(String collectionName, + CollectionPreparer> collectionPreparer, Document query, Document fields, + @Nullable Document sort, Class entityClass, UpdateDefinition update, FindAndModifyOptions options, + QueryResultConverter resultConverter) { MongoPersistentEntity entity = mappingContext.getPersistentEntity(entityClass); + UpdateContext updateContext = queryOperations.updateSingleContext(update, query, false); + updateContext.increaseVersionForUpdateIfNecessary(entity); return Mono.defer(() -> { - increaseVersionForUpdateIfNecessary(entity, update); - - Document mappedQuery = queryMapper.getMappedObject(query, entity); - Document mappedUpdate = updateMapper.getMappedObject(update.getUpdateObject(), entity); + Document mappedQuery = updateContext.getMappedQuery(entity); + Object mappedUpdate = updateContext.isAggregationUpdate() ? updateContext.getUpdatePipeline(entityClass) + : updateContext.getMappedUpdate(entity); if (LOGGER.isDebugEnabled()) { LOGGER.debug(String.format( "findAndModify using query: %s fields: %s sort: %s for class: %s and update: %s " + "in collection: %s", - serializeToJsonSafely(mappedQuery), fields, sort, entityClass, serializeToJsonSafely(mappedUpdate), - collectionName)); + serializeToJsonSafely(mappedQuery), fields, serializeToJsonSafely(sort), entityClass, + serializeToJsonSafely(mappedUpdate), collectionName)); } - return executeFindOneInternal(new FindAndModifyCallback(mappedQuery, fields, sort, mappedUpdate, options), - new ReadDocumentCallback<>(this.mongoConverter, entityClass, collectionName), collectionName); + EntityProjection projection = EntityProjection.nonProjecting(entityClass); + DocumentCallback callback = getResultReader(projection, collectionName, resultConverter); + + return executeFindOneInternal( + new FindAndModifyCallback(collectionPreparer, mappedQuery, fields, sort, mappedUpdate, + 
update.getArrayFilters().stream().map(ArrayFilter::asDocument).collect(Collectors.toList()), options), + callback, collectionName); }); } @@ -2372,6 +2574,7 @@ protected Mono doFindAndModify(String collectionName, Document query, Doc * Customize this part for findAndReplace. * * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. * @param mappedQuery the query to look up documents. * @param mappedFields the fields to project the result to. * @param mappedSort the sort to be applied when executing the query. @@ -2384,35 +2587,97 @@ protected Mono doFindAndModify(String collectionName, Document query, Doc * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. * @since 2.1 */ - protected Mono doFindAndReplace(String collectionName, Document mappedQuery, Document mappedFields, + protected Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, FindAndReplaceOptions options, Class resultType) { + EntityProjection projection = operations.introspectProjection(resultType, entityType); + + return doFindAndReplace(collectionName, collectionPreparer, mappedQuery, mappedFields, mappedSort, collation, + entityType, replacement, options, projection, QueryResultConverter.entity()); + } + + /** + * Customize this part for findAndReplace. + * + * @param collectionName The name of the collection to perform the operation in. + * @param collectionPreparer the preparer to prepare the collection for the actual use. + * @param mappedQuery the query to look up documents. + * @param mappedFields the fields to project the result to. + * @param mappedSort the sort to be applied when executing the query. + * @param collation collation settings for the query. 
Can be {@literal null}. + * @param entityType the source domain type. + * @param replacement the replacement {@link Document}. + * @param options applicable options. + * @param projection the projection descriptor. + * @return {@link Mono#empty()} if object does not exist, {@link FindAndReplaceOptions#isReturnNew() return new} is + * {@literal false} and {@link FindAndReplaceOptions#isUpsert() upsert} is {@literal false}. + * @since 3.4 + */ + private Mono doFindAndReplace(String collectionName, + CollectionPreparer> collectionPreparer, Document mappedQuery, Document mappedFields, + Document mappedSort, com.mongodb.client.model.Collation collation, Class entityType, Document replacement, + FindAndReplaceOptions options, EntityProjection projection, + QueryResultConverter resultConverter) { + return Mono.defer(() -> { if (LOGGER.isDebugEnabled()) { - LOGGER.debug( - "findAndReplace using query: {} fields: {} sort: {} for class: {} and replacement: {} " - + "in collection: {}", - serializeToJsonSafely(mappedQuery), mappedFields, mappedSort, entityType, - serializeToJsonSafely(replacement), collectionName); + LOGGER.debug(String.format( + "findAndReplace using query: %s fields: %s sort: %s for class: %s and replacement: %s " + + "in collection: %s", + serializeToJsonSafely(mappedQuery), mappedFields, serializeToJsonSafely(mappedSort), entityType, + serializeToJsonSafely(replacement), collectionName)); } - maybeEmitEvent(new BeforeSaveEvent<>(replacement, replacement, collectionName)); + DocumentCallback resultReader = getResultReader(projection, collectionName, resultConverter); + + return executeFindOneInternal(new FindAndReplaceCallback(collectionPreparer, mappedQuery, mappedFields, + mappedSort, replacement, collation, options), resultReader, collectionName); - return executeFindOneInternal( - new FindAndReplaceCallback(mappedQuery, mappedFields, mappedSort, replacement, collation, options), - new ProjectingReadCallback<>(this.mongoConverter, entityType, 
resultType, collectionName), collectionName); }); } protected , T> E maybeEmitEvent(E event) { + eventDelegate.publishEvent(event); + return event; + } - if (null != eventPublisher) { - eventPublisher.publishEvent(event); + protected Mono maybeCallBeforeConvert(T object, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveBeforeConvertCallback.class, object, collection); } - return event; + return Mono.just(object); + } + + protected Mono maybeCallBeforeSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveBeforeSaveCallback.class, object, document, collection); + } + + return Mono.just(object); + } + + protected Mono maybeCallAfterSave(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveAfterSaveCallback.class, object, document, collection); + } + + return Mono.just(object); + } + + protected Mono maybeCallAfterConvert(T object, Document document, String collection) { + + if (entityCallbacks != null) { + return entityCallbacks.callback(ReactiveAfterConvertCallback.class, object, document, collection); + } + + return Mono.just(object); } private MongoCollection getAndPrepareCollection(MongoDatabase db, String collectionName) { @@ -2425,25 +2690,33 @@ private MongoCollection getAndPrepareCollection(MongoDatabase db, Stri } } - protected void ensureNotIterable(Object o) { - - boolean isIterable = o.getClass().isArray() - || ITERABLE_CLASSES.stream().anyMatch(iterableClass -> iterableClass.isAssignableFrom(o.getClass()) - || o.getClass().getName().equals(iterableClass.getName())); + /** + * Ensure the given {@literal source} is not an {@link java.lang.reflect.Array}, {@link Collection} or + * {@link Iterator}. + * + * @param source can be {@literal null}. + * @since 3.2. 
+ */ + protected void ensureNotCollectionLike(@Nullable Object source) { - if (isIterable) { + if (EntityOperations.isCollectionLike(source) || source instanceof Publisher) { throw new IllegalArgumentException("Cannot use a collection here."); } } /** * Prepare the collection before any processing is done using it. This allows a convenient way to apply settings like - * slaveOk() etc. Can be overridden in sub-classes. + * withCodecRegistry() etc. Can be overridden in sub-classes. * * @param collection */ protected MongoCollection prepareCollection(MongoCollection collection) { - return this.readPreference != null ? collection.withReadPreference(readPreference) : collection; + + if (this.readPreference != null && this.readPreference != collection.getReadPreference()) { + return collection.withReadPreference(readPreference); + } + + return collection; } /** @@ -2466,19 +2739,26 @@ protected MongoDatabase prepareDatabase(MongoDatabase database) { * @see #setWriteConcern(WriteConcern) * @see #setWriteConcernResolver(WriteConcernResolver) */ - @Nullable - protected WriteConcern prepareWriteConcern(MongoAction mongoAction) { + protected @Nullable WriteConcern prepareWriteConcern(MongoAction mongoAction) { WriteConcern wc = writeConcernResolver.resolve(mongoAction); return potentiallyForceAcknowledgedWrite(wc); } + /** + * @return the {@link MongoDatabaseFactory} in use. 
+ * @since 3.1.4 + */ + public ReactiveMongoDatabaseFactory getMongoDatabaseFactory() { + return mongoDatabaseFactory; + } + @Nullable private WriteConcern potentiallyForceAcknowledgedWrite(@Nullable WriteConcern wc) { if (ObjectUtils.nullSafeEquals(WriteResultChecking.EXCEPTION, writeResultChecking)) { if (wc == null || wc.getWObject() == null - || (wc.getWObject() instanceof Number && ((Number) wc.getWObject()).intValue() < 1)) { + || (wc.getWObject() instanceof Number concern && concern.intValue() < 1)) { return WriteConcern.ACKNOWLEDGED; } } @@ -2502,7 +2782,7 @@ private Mono executeFindOneInternal(ReactiveCollectionCallback DocumentCallback objectCallback, String collectionName) { return createMono(collectionName, - collection -> Mono.from(collectionCallback.doInCollection(collection)).map(objectCallback::doWith)); + collection -> Mono.from(collectionCallback.doInCollection(collection)).flatMap(objectCallback::doWith)); } /** @@ -2524,29 +2804,22 @@ private Mono executeFindOneInternal(ReactiveCollectionCallback * @return */ private Flux executeFindMultiInternal(ReactiveCollectionQueryCallback collectionCallback, - @Nullable FindPublisherPreparer preparer, DocumentCallback objectCallback, String collectionName) { + FindPublisherPreparer preparer, DocumentCallback objectCallback, String collectionName) { return createFlux(collectionName, collection -> { - - FindPublisher findPublisher = collectionCallback.doInCollection(collection); - - if (preparer != null) { - findPublisher = preparer.prepare(findPublisher); - } - return Flux.from(findPublisher).map(objectCallback::doWith); + return Flux.from(preparer.initiateFind(collection, collectionCallback::doInCollection)) + .flatMapSequential(objectCallback::doWith); }); } - private T execute(MongoDatabaseCallback action) { + @SuppressWarnings("unchecked") + private DocumentCallback getResultReader(EntityProjection projection, String collectionName, + QueryResultConverter resultConverter) { - Assert.notNull(action, 
"MongoDatabaseCallback must not be null!"); + DocumentCallback readCallback = new ProjectingReadCallback<>(mongoConverter, projection, collectionName); - try { - MongoDatabase db = this.doGetDatabase(); - return action.doInDatabase(db); - } catch (RuntimeException e) { - throw potentiallyConvertRuntimeException(e, exceptionTranslator); - } + return resultConverter == QueryResultConverter.entity() ? (DocumentCallback) readCallback + : new QueryResultConverterCallback<>(resultConverter, readCallback); } /** @@ -2558,8 +2831,8 @@ private Function translateException() { return throwable -> { - if (throwable instanceof RuntimeException) { - return potentiallyConvertRuntimeException((RuntimeException) throwable, exceptionTranslator); + if (throwable instanceof RuntimeException runtimeException) { + return potentiallyConvertRuntimeException(runtimeException, exceptionTranslator); } return throwable; @@ -2580,42 +2853,11 @@ private static RuntimeException potentiallyConvertRuntimeException(RuntimeExcept return resolved == null ? ex : resolved; } - @Nullable - private MongoPersistentEntity getPersistentEntity(@Nullable Class type) { + private @Nullable MongoPersistentEntity getPersistentEntity(@Nullable Class type) { return type == null ? null : mappingContext.getPersistentEntity(type); } - @Nullable - private MongoPersistentProperty getIdPropertyFor(@Nullable Class type) { - - if (type == null) { - return null; - } - - MongoPersistentEntity persistentEntity = mappingContext.getPersistentEntity(type); - return persistentEntity != null ? 
persistentEntity.getIdProperty() : null; - } - - private String determineEntityCollectionName(@Nullable T obj) { - - if (null != obj) { - return determineCollectionName(obj.getClass()); - } - - return null; - } - - String determineCollectionName(@Nullable Class entityClass) { - - if (entityClass == null) { - throw new InvalidDataAccessApiUsageException( - "No class parameter provided, entity collection can't be determined!"); - } - - return mappingContext.getRequiredPersistentEntity(entityClass).getCollection(); - } - - private static MappingMongoConverter getDefaultMongoConverter() { + private MappingMongoConverter getDefaultMongoConverter() { MongoCustomConversions conversions = new MongoCustomConversions(Collections.emptyList()); @@ -2625,59 +2867,68 @@ private static MappingMongoConverter getDefaultMongoConverter() { MappingMongoConverter converter = new MappingMongoConverter(NO_OP_REF_RESOLVER, context); converter.setCustomConversions(conversions); + converter.setCodecRegistryProvider(this.mongoDatabaseFactory); converter.afterPropertiesSet(); return converter; } - private Document getMappedSortObject(Query query, Class type) { + @Contract("null, _ -> null") + private @Nullable Document getMappedSortObject(@Nullable Query query, Class type) { if (query == null) { return null; } - return queryMapper.getMappedSort(query.getSortObject(), mappingContext.getPersistentEntity(type)); + return getMappedSortObject(query.getSortObject(), type); + } + + @Contract("null, _ -> null") + private @Nullable Document getMappedSortObject(@Nullable Document sortObject, Class type) { + + if (ObjectUtils.isEmpty(sortObject)) { + return null; + } + + return queryMapper.getMappedSort(sortObject, mappingContext.getPersistentEntity(type)); } // Callback implementations /** * Simple {@link ReactiveCollectionCallback} that takes a query {@link Document} plus an optional fields specification - * {@link Document} and executes that against the {@link DBCollection}. 
+ * {@link Document} and executes that against the {@link MongoCollection}. * * @author Oliver Gierke * @author Thomas Risberg + * @author Christoph Strobl */ private static class FindOneCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Optional fields; - private final Optional collation; + private final FindPublisherPreparer preparer; - FindOneCallback(Document query, @Nullable Document fields, @Nullable Collation collation) { + FindOneCallback(CollectionPreparer> collectionPreparer, Document query, + @Nullable Document fields, FindPublisherPreparer preparer) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = Optional.ofNullable(fields); - this.collation = Optional.ofNullable(collation); + this.preparer = preparer; } @Override public Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException { - FindPublisher publisher = collection.find(query, Document.class); - - if (LOGGER.isDebugEnabled()) { - - LOGGER.debug("findOne using query: {} fields: {} in db.collection: {}", serializeToJsonSafely(query), - serializeToJsonSafely(fields.orElseGet(Document::new)), collection.getNamespace().getFullName()); - } + FindPublisher publisher = preparer.initiateFind(collectionPreparer.prepare(collection), + col -> col.find(query, Document.class)); if (fields.isPresent()) { publisher = publisher.projection(fields.get()); } - publisher = collation.map(Collation::toMongoCollation).map(publisher::collation).orElse(publisher); - return publisher.limit(1).first(); } } @@ -2690,14 +2941,18 @@ public Publisher doInCollection(MongoCollection collection) */ private static class FindCallback implements ReactiveCollectionQueryCallback { + private final CollectionPreparer> collectionPreparer; + private final @Nullable Document query; private final @Nullable Document fields; - FindCallback(@Nullable Document query) { - 
this(query, null); + FindCallback(CollectionPreparer> collectionPreparer, @Nullable Document query) { + this(collectionPreparer, query, null); } - FindCallback(Document query, Document fields) { + FindCallback(CollectionPreparer> collectionPreparer, @Nullable Document query, + @Nullable Document fields) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; } @@ -2705,11 +2960,12 @@ private static class FindCallback implements ReactiveCollectionQueryCallback doInCollection(MongoCollection collection) { + MongoCollection collectionToUse = collectionPreparer.prepare(collection); FindPublisher findPublisher; if (ObjectUtils.isEmpty(query)) { - findPublisher = collection.find(Document.class); + findPublisher = collectionToUse.find(Document.class); } else { - findPublisher = collection.find(query, Document.class); + findPublisher = collectionToUse.find(query, Document.class); } if (ObjectUtils.isEmpty(fields)) { @@ -2728,13 +2984,15 @@ public FindPublisher doInCollection(MongoCollection collecti */ private static class FindAndRemoveCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; - private final Document sort; + private final @Nullable Document sort; private final Optional collation; - FindAndRemoveCallback(Document query, Document fields, Document sort, @Nullable Collation collation) { - + FindAndRemoveCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, @Nullable Document sort, @Nullable Collation collation) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; @@ -2748,7 +3006,7 @@ public Publisher doInCollection(MongoCollection collection) FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); collation.map(Collation::toMongoCollation).ifPresent(findOneAndDeleteOptions::collation); - 
return collection.findOneAndDelete(query, findOneAndDeleteOptions); + return collectionPreparer.prepare(collection).findOneAndDelete(query, findOneAndDeleteOptions); } } @@ -2757,19 +3015,24 @@ public Publisher doInCollection(MongoCollection collection) */ private static class FindAndModifyCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; - private final Document fields; - private final Document sort; - private final Document update; + private final @Nullable Document fields; + private final @Nullable Document sort; + private final Object update; + private final List arrayFilters; private final FindAndModifyOptions options; - FindAndModifyCallback(Document query, Document fields, Document sort, Document update, + FindAndModifyCallback(CollectionPreparer> collectionPreparer, Document query, + @Nullable Document fields, @Nullable Document sort, Object update, List arrayFilters, FindAndModifyOptions options) { + this.collectionPreparer = collectionPreparer; this.query = query; this.fields = fields; this.sort = sort; this.update = update; + this.arrayFilters = arrayFilters; this.options = options; } @@ -2777,21 +3040,30 @@ private static class FindAndModifyCallback implements ReactiveCollectionCallback public Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException { + MongoCollection collectionToUse = collectionPreparer.prepare(collection); if (options.isRemove()) { FindOneAndDeleteOptions findOneAndDeleteOptions = convertToFindOneAndDeleteOptions(fields, sort); findOneAndDeleteOptions = options.getCollation().map(Collation::toMongoCollation) .map(findOneAndDeleteOptions::collation).orElse(findOneAndDeleteOptions); - return collection.findOneAndDelete(query, findOneAndDeleteOptions); + return collectionToUse.findOneAndDelete(query, findOneAndDeleteOptions); + } + + FindOneAndUpdateOptions findOneAndUpdateOptions = 
convertToFindOneAndUpdateOptions(options, fields, sort, + arrayFilters); + if (update instanceof Document document) { + return collection.findOneAndUpdate(query, document, findOneAndUpdateOptions); + } else if (update instanceof List) { + return collectionToUse.findOneAndUpdate(query, (List) update, findOneAndUpdateOptions); } - FindOneAndUpdateOptions findOneAndUpdateOptions = convertToFindOneAndUpdateOptions(options, fields, sort); - return collection.findOneAndUpdate(query, update, findOneAndUpdateOptions); + return Flux + .error(new IllegalArgumentException(String.format("Using %s is not supported in findOneAndUpdate", update))); } - private FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, Document fields, - Document sort) { + private static FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOptions options, + @Nullable Document fields, @Nullable Document sort, List arrayFilters) { FindOneAndUpdateOptions result = new FindOneAndUpdateOptions(); @@ -2805,6 +3077,10 @@ private FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOp result = options.getCollation().map(Collation::toMongoCollation).map(result::collation).orElse(result); + if (!CollectionUtils.isEmpty(arrayFilters)) { + result.arrayFilters(arrayFilters); + } + return result; } } @@ -2816,26 +3092,34 @@ private FindOneAndUpdateOptions convertToFindOneAndUpdateOptions(FindAndModifyOp * @author Christoph Strobl * @since 2.1 */ - @RequiredArgsConstructor(access = AccessLevel.PACKAGE) private static class FindAndReplaceCallback implements ReactiveCollectionCallback { + private final CollectionPreparer> collectionPreparer; private final Document query; private final Document fields; private final Document sort; private final Document update; - private final @Nullable com.mongodb.client.model.Collation collation; + private final com.mongodb.client.model.@Nullable Collation collation; private final FindAndReplaceOptions options; - /* - 
* (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveCollectionCallback#doInCollection(com.mongodb.reactivestreams.client.MongoCollection) - */ + FindAndReplaceCallback(CollectionPreparer> collectionPreparer, Document query, + Document fields, Document sort, Document update, com.mongodb.client.model.@Nullable Collation collation, + FindAndReplaceOptions options) { + this.collectionPreparer = collectionPreparer; + this.query = query; + this.fields = fields; + this.sort = sort; + this.update = update; + this.collation = collation; + this.options = options; + } + @Override public Publisher doInCollection(MongoCollection collection) throws MongoException, DataAccessException { FindOneAndReplaceOptions findOneAndReplaceOptions = convertToFindOneAndReplaceOptions(options, fields, sort); - return collection.findOneAndReplace(query, update, findOneAndReplaceOptions); + return collectionPreparer.prepare(collection).findOneAndReplace(query, update, findOneAndReplaceOptions); } private FindOneAndReplaceOptions convertToFindOneAndReplaceOptions(FindAndReplaceOptions options, Document fields, @@ -2855,7 +3139,8 @@ private FindOneAndReplaceOptions convertToFindOneAndReplaceOptions(FindAndReplac } } - private static FindOneAndDeleteOptions convertToFindOneAndDeleteOptions(Document fields, Document sort) { + private static FindOneAndDeleteOptions convertToFindOneAndDeleteOptions(@Nullable Document fields, + @Nullable Document sort) { FindOneAndDeleteOptions result = new FindOneAndDeleteOptions(); result = result.projection(fields).sort(sort); @@ -2871,7 +3156,7 @@ private static FindOneAndDeleteOptions convertToFindOneAndDeleteOptions(Document interface DocumentCallback { - T doWith(Document object); + Mono doWith(Document object); } /** @@ -2892,14 +3177,32 @@ interface MongoDatabaseCallback { */ interface ReactiveCollectionQueryCallback extends ReactiveCollectionCallback { + @Override FindPublisher doInCollection(MongoCollection collection) throws 
MongoException, DataAccessException; } + static class QueryResultConverterCallback implements DocumentCallback { + + private final QueryResultConverter converter; + private final DocumentCallback delegate; + + QueryResultConverterCallback(QueryResultConverter converter, DocumentCallback delegate) { + this.converter = converter; + this.delegate = delegate; + } + + @Override + public Mono doWith(Document object) { + return delegate.doWith(object).map(it -> converter.mapDocument(object, () -> it)); + } + } + /** * Simple {@link DocumentCallback} that will transform {@link Document} into the given target type using the given * {@link EntityReader}. * * @author Mark Paluch + * @author Roman Puchkovskiy */ class ReadDocumentCallback implements DocumentCallback { @@ -2909,68 +3212,68 @@ class ReadDocumentCallback implements DocumentCallback { ReadDocumentCallback(EntityReader reader, Class type, String collectionName) { - Assert.notNull(reader, "EntityReader must not be null!"); - Assert.notNull(type, "Entity type must not be null!"); + Assert.notNull(reader, "EntityReader must not be null"); + Assert.notNull(type, "Entity type must not be null"); this.reader = reader; this.type = type; this.collectionName = collectionName; } - public T doWith(@Nullable Document object) { + @Override + public Mono doWith(Document document) { + + maybeEmitEvent(new AfterLoadEvent<>(document, type, collectionName)); - if (null != object) { - maybeEmitEvent(new AfterLoadEvent<>(object, type, collectionName)); - } - T source = reader.read(type, object); - if (null != source) { - maybeEmitEvent(new AfterConvertEvent<>(object, source, collectionName)); + T entity = reader.read(type, document); + + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - return source; + + maybeEmitEvent(new AfterConvertEvent<>(document, entity, collectionName)); + return maybeCallAfterConvert(entity, document, collectionName); } } /** - * {@link 
MongoTemplate.DocumentCallback} transforming {@link Document} into the given {@code targetType} or - * decorating the {@code sourceType} with a {@literal projection} in case the {@code targetType} is an - * {@litera interface}. + * {@link DocumentCallback} transforming {@link Document} into the given {@code targetType} or decorating the + * {@code sourceType} with a {@literal projection} in case the {@code targetType} is an {@literal interface}. * * @param * @param * @author Christoph Strobl + * @author Roman Puchkovskiy * @since 2.0 */ - @RequiredArgsConstructor private class ProjectingReadCallback implements DocumentCallback { - private final @NonNull EntityReader reader; - private final @NonNull Class entityType; - private final @NonNull Class targetType; - private final @NonNull String collectionName; + private final MongoConverter reader; + private final EntityProjection projection; + private final String collectionName; - @Nullable + ProjectingReadCallback(MongoConverter reader, EntityProjection projection, String collectionName) { + this.reader = reader; + this.projection = projection; + this.collectionName = collectionName; + } + + @Override @SuppressWarnings("unchecked") - public T doWith(@Nullable Document object) { + public Mono doWith(Document document) { - if (object == null) { - return null; - } + Class returnType = projection.getMappedType().getType(); + maybeEmitEvent(new AfterLoadEvent<>(document, returnType, collectionName)); - Class typeToRead = targetType.isInterface() || targetType.isAssignableFrom(entityType) // - ? entityType // - : targetType; + Object entity = reader.project(projection, document); - if (null != object) { - maybeEmitEvent(new AfterLoadEvent<>(object, typeToRead, collectionName)); + if (entity == null) { + throw new MappingException(String.format("EntityReader %s returned null", reader)); } - Object source = reader.read(typeToRead, object); - Object result = targetType.isInterface() ? 
projectionFactory.createProjection(targetType, source) : source; - - if (null != source) { - maybeEmitEvent(new AfterConvertEvent<>(object, result, collectionName)); - } - return (T) result; + T castEntity = (T) entity; + maybeEmitEvent(new AfterConvertEvent<>(document, castEntity, collectionName)); + return maybeCallAfterConvert(castEntity, document, collectionName); } } @@ -2979,34 +3282,47 @@ public T doWith(@Nullable Document object) { * a delegate and creates a {@link GeoResult} from the result. * * @author Mark Paluch + * @author Chrstoph Strobl + * @author Roman Puchkovskiy */ - static class GeoNearResultDbObjectCallback implements DocumentCallback> { + static class GeoNearResultDocumentCallback implements DocumentCallback> { + private final String distanceField; private final DocumentCallback delegate; private final Metric metric; /** - * Creates a new {@link GeoNearResultDbObjectCallback} using the given {@link DocumentCallback} delegate for + * Creates a new {@link GeoNearResultDocumentCallback} using the given {@link DocumentCallback} delegate for * {@link GeoResult} content unmarshalling. * + * @param distanceField the field to read the distance from. * @param delegate must not be {@literal null}. + * @param metric the {@link Metric} to apply to the result distance. 
*/ - GeoNearResultDbObjectCallback(DocumentCallback delegate, Metric metric) { + GeoNearResultDocumentCallback(String distanceField, DocumentCallback delegate, Metric metric) { - Assert.notNull(delegate, "DocumentCallback must not be null!"); + Assert.notNull(delegate, "DocumentCallback must not be null"); + this.distanceField = distanceField; this.delegate = delegate; this.metric = metric; } - public GeoResult doWith(Document object) { + @Override + public Mono> doWith(Document object) { + + double distance = getDistance(object); + + return delegate.doWith(object).map(doWith -> new GeoResult<>(doWith, Distance.of(distance, metric))); + } - double distance = (Double) object.get("dis"); - Document content = (Document) object.get("obj"); + double getDistance(Document object) { - T doWith = delegate.doWith(content); + if (object.containsKey(distanceField)) { + return NumberUtils.convertNumberToTargetClass(object.get(distanceField, Number.class), Double.class); + } - return new GeoResult<>(doWith, new Distance(distance, metric)); + return Double.NaN; } } @@ -3015,73 +3331,80 @@ public GeoResult doWith(Document object) { */ class QueryFindPublisherPreparer implements FindPublisherPreparer { - private final @Nullable Query query; + private final Query query; + + private final Document sortObject; + + private final int limit; + + private final long skip; private final @Nullable Class type; - QueryFindPublisherPreparer(@Nullable Query query, @Nullable Class type) { + QueryFindPublisherPreparer(Query query, @Nullable Class type) { + this(query, query.getSortObject(), query.getLimit(), query.getSkip(), type); + } + + QueryFindPublisherPreparer(Query query, Document sortObject, int limit, long skip, @Nullable Class type) { this.query = query; + this.sortObject = sortObject; + this.limit = limit; + this.skip = skip; this.type = type; } - @SuppressWarnings("deprecation") - public FindPublisher prepare(FindPublisher findPublisher) { - - if (query == null) { - return 
findPublisher; - } - - FindPublisher findPublisherToUse; + @Override + public FindPublisher prepare(FindPublisher findPublisher) { - findPublisherToUse = query.getCollation().map(Collation::toMongoCollation).map(findPublisher::collation) + FindPublisher findPublisherToUse = operations.forType(type) // + .getCollation(query) // + .map(Collation::toMongoCollation) // + .map(findPublisher::collation) // .orElse(findPublisher); + HintFunction hintFunction = HintFunction.from(query.getHint()); Meta meta = query.getMeta(); - if (query.getSkip() <= 0 && query.getLimit() <= 0 && ObjectUtils.isEmpty(query.getSortObject()) - && !StringUtils.hasText(query.getHint()) && !meta.hasValues()) { + if (skip <= 0 && limit <= 0 && ObjectUtils.isEmpty(sortObject) && hintFunction.isEmpty() && !meta.hasValues()) { return findPublisherToUse; } try { - if (query.getSkip() > 0) { - findPublisherToUse = findPublisherToUse.skip((int) query.getSkip()); + if (skip > 0) { + findPublisherToUse = findPublisherToUse.skip((int) skip); } - if (query.getLimit() > 0) { - findPublisherToUse = findPublisherToUse.limit(query.getLimit()); + if (limit > 0) { + findPublisherToUse = findPublisherToUse.limit(limit); } - if (!ObjectUtils.isEmpty(query.getSortObject())) { - Document sort = type != null ? getMappedSortObject(query, type) : query.getSortObject(); + if (!ObjectUtils.isEmpty(sortObject)) { + Document sort = type != null ? 
getMappedSortObject(sortObject, type) : sortObject; findPublisherToUse = findPublisherToUse.sort(sort); } - if (StringUtils.hasText(query.getHint())) { - findPublisherToUse = findPublisherToUse.hint(Document.parse(query.getHint())); + if (hintFunction.isPresent()) { + findPublisherToUse = hintFunction.apply(mongoDatabaseFactory, findPublisherToUse::hintString, + findPublisherToUse::hint); } if (meta.hasValues()) { - if (StringUtils.hasText(meta.getComment())) { - findPublisherToUse = findPublisherToUse.comment(meta.getComment()); - } - - if (meta.getSnapshot()) { - findPublisherToUse = findPublisherToUse.snapshot(meta.getSnapshot()); + if (meta.hasComment()) { + findPublisherToUse = findPublisherToUse.comment(meta.getRequiredComment()); } - if (meta.getMaxScan() != null) { - findPublisherToUse = findPublisherToUse.maxScan(meta.getMaxScan()); - } - - if (meta.getMaxTimeMsec() != null) { - findPublisherToUse = findPublisherToUse.maxTime(meta.getMaxTimeMsec(), TimeUnit.MILLISECONDS); + if (meta.hasMaxTime()) { + findPublisherToUse = findPublisherToUse.maxTime(meta.getRequiredMaxTimeMsec(), TimeUnit.MILLISECONDS); } if (meta.getCursorBatchSize() != null) { findPublisherToUse = findPublisherToUse.batchSize(meta.getCursorBatchSize()); } + + if (meta.getAllowDiskUse() != null) { + findPublisherToUse = findPublisherToUse.allowDiskUse(meta.getAllowDiskUse()); + } } } catch (RuntimeException e) { @@ -3090,6 +3413,7 @@ public FindPublisher prepare(FindPublisher findPublisher) { return findPublisherToUse; } + } class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer { @@ -3099,19 +3423,18 @@ class TailingQueryFindPublisherPreparer extends QueryFindPublisherPreparer { } @Override - public FindPublisher prepare(FindPublisher findPublisher) { + public FindPublisher prepare(FindPublisher findPublisher) { return super.prepare(findPublisher.cursorType(CursorType.TailableAwait)); } } - private static List toDocuments(final Collection documents) { + private static 
List toDocuments(Collection documents) { return new ArrayList<>(documents); } /** * {@link MongoTemplate} extension bound to a specific {@link ClientSession} that is applied when interacting with the - * server through the driver API. - *

          + * server through the driver API.
          * The prepare steps for {@link MongoDatabase} and {@link MongoCollection} proxy the target and invoke the desired * target method matching the actual arguments plus a {@link ClientSession}. * @@ -3135,60 +3458,34 @@ static class ReactiveSessionBoundMongoTemplate extends ReactiveMongoTemplate { this.session = session; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoTemplate#getCollection(java.lang.String) - */ @Override - public MongoCollection getCollection(String collectionName) { + public Mono> getCollection(String collectionName) { // native MongoDB objects that offer methods with ClientSession must not be proxied. return delegate.getCollection(collectionName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoTemplate#getMongoDatabase() - */ @Override - public MongoDatabase getMongoDatabase() { + public Mono getMongoDatabase() { // native MongoDB objects that offer methods with ClientSession must not be proxied. return delegate.getMongoDatabase(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveMongoTemplate#count(org.springframework.data.mongodb.core.query.Query, java.lang.Class, java.lang.String) - */ @Override - public Mono count(Query query, @Nullable Class entityClass, String collectionName) { - - if (!session.hasActiveTransaction()) { - return super.count(query, entityClass, collectionName); - } - - return createMono(collectionName, collection -> { - - final Document Document = query == null ? null - : delegate.queryMapper.getMappedObject(query.getQueryObject(), - entityClass == null ? 
null : delegate.mappingContext.getPersistentEntity(entityClass)); - - CountOptions options = new CountOptions(); - if (query != null) { - query.getCollation().map(Collation::toMongoCollation).ifPresent(options::collation); - } - - return collection.countDocuments(Document, options); - }); + protected Mono countCanBeEstimated(Document filter, CountOptions options) { + return Mono.just(false); } } - @RequiredArgsConstructor class IndexCreatorEventListener implements ApplicationListener> { final Consumer subscriptionExceptionHandler; + public IndexCreatorEventListener(Consumer subscriptionExceptionHandler) { + this.subscriptionExceptionHandler = subscriptionExceptionHandler; + } + @Override public void onApplicationEvent(MappingContextEvent event) { @@ -3199,9 +3496,66 @@ public void onApplicationEvent(MappingContextEvent event) { PersistentEntity entity = event.getPersistentEntity(); // Double check type as Spring infrastructure does not consider nested generics - if (entity instanceof MongoPersistentEntity) { - onCheckForIndexes((MongoPersistentEntity) entity, subscriptionExceptionHandler); + if (entity instanceof MongoPersistentEntity mongoPersistentProperties) { + + onCheckForIndexes(mongoPersistentProperties, subscriptionExceptionHandler); } } } + + /** + * Value object chaining together a given source document with its mapped representation and the collection to persist + * it to. 
+ * + * @param + * @author Christoph Strobl + * @since 2.2 + */ + private static class PersistableEntityModel { + + private final T source; + private final @Nullable Document target; + private final String collection; + + private PersistableEntityModel(T source, @Nullable Document target, String collection) { + + this.source = source; + this.target = target; + this.collection = collection; + } + + static PersistableEntityModel of(T source, String collection) { + return new PersistableEntityModel<>(source, null, collection); + } + + static PersistableEntityModel of(T source, Document target, String collection) { + return new PersistableEntityModel<>(source, target, collection); + } + + PersistableEntityModel mutate(T source) { + return new PersistableEntityModel(source, target, collection); + } + + PersistableEntityModel addTargetDocument(Document target) { + return new PersistableEntityModel(source, target, collection); + } + + T getSource() { + return source; + } + + @Nullable + Document getTarget() { + return target; + } + + String getCollection() { + return collection; + } + } + + @FunctionalInterface + interface CountExecution { + Mono countDocuments(String collection, Document filter, CountOptions options); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java index 6f46be917c..dd515cb37c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,7 +18,9 @@ import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Contract; import com.mongodb.client.result.DeleteResult; @@ -55,20 +57,26 @@ public interface ReactiveRemoveOperation { ReactiveRemove remove(Class domainType); /** - * Compose remove execution by calling one of the terminating methods. + * @author Christoph Strobl + * @since 5.0 */ - interface TerminatingRemove { + interface TerminatingResults { /** - * Remove all documents matching. + * Map the query result to a different type using {@link QueryResultConverter}. * - * @return {@link Mono} emitting the {@link DeleteResult}. Never {@literal null}. + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link ExecutableFindOperation.TerminatingResults}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 */ - Mono all(); + @Contract("_ -> new") + TerminatingResults map(QueryResultConverter converter); /** * Remove and return all matching documents.
          - * NOTE The entire list of documents will be fetched before sending the actual delete commands. + * NOTE: The entire list of documents will be fetched before sending the actual delete commands. * Also, {@link org.springframework.context.ApplicationEvent}s will be published for each and every delete * operation. * @@ -77,6 +85,20 @@ interface TerminatingRemove { Flux findAndRemove(); } + /** + * Compose remove execution by calling one of the terminating methods. + */ + interface TerminatingRemove extends TerminatingResults { + + /** + * Remove all documents matching. + * + * @return {@link Mono} emitting the {@link DeleteResult}. Never {@literal null}. + */ + Mono all(); + + } + /** * Collection override (optional). */ @@ -106,6 +128,18 @@ interface RemoveWithQuery extends TerminatingRemove { * @throws IllegalArgumentException if query is {@literal null}. */ TerminatingRemove matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link TerminatingRemove}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default TerminatingRemove matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } interface ReactiveRemove extends RemoveWithCollection {} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java index b94440555e..f77b5296d7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveRemoveOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,10 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.query.Query; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -35,62 +32,58 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveRemoveOperationSupport implements ReactiveRemoveOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull ReactiveMongoTemplate tempate; + private final ReactiveMongoTemplate template; + + ReactiveRemoveOperationSupport(ReactiveMongoTemplate template) { + this.template = template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation#remove(java.lang.Class) - */ @Override public ReactiveRemove remove(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveRemoveSupport<>(tempate, domainType, ALL_QUERY, null); + return new ReactiveRemoveSupport<>(template, domainType, ALL_QUERY, null, QueryResultConverter.entity()); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) - static class ReactiveRemoveSupport implements ReactiveRemove, RemoveWithCollection { + static class 
ReactiveRemoveSupport implements ReactiveRemove, RemoveWithCollection { + + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Query query; + private final @Nullable String collection; + private final QueryResultConverter resultConverter; + + ReactiveRemoveSupport(ReactiveMongoTemplate template, Class domainType, Query query, @Nullable String collection, + QueryResultConverter resultConverter) { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - Query query; - String collection; + this.template = template; + this.domainType = domainType; + this.query = query; + this.collection = collection; + this.resultConverter = resultConverter; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.RemoveWithCollection#inCollection(String) - */ @Override public RemoveWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); - return new ReactiveRemoveSupport<>(template, domainType, query, collection); + return new ReactiveRemoveSupport<>(template, domainType, query, collection, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.RemoveWithQuery#matching(org.springframework.data.mongodb.core.Query) - */ @Override public TerminatingRemove matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); - return new ReactiveRemoveSupport<>(template, domainType, query, collection); + return new ReactiveRemoveSupport<>(template, domainType, query, collection, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.TerminatingRemove#all() - */ @Override public Mono all() { @@ -99,20 +92,22 @@ public Mono all() { return template.doRemove(collectionName, query, 
domainType); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveRemoveOperation.TerminatingRemove#findAndRemove() - */ @Override public Flux findAndRemove() { String collectionName = getCollectionName(); - return template.doFindAndDelete(collectionName, query, domainType); + return template.doFindAndDelete(collectionName, query, domainType, resultConverter); + } + + @Override + @SuppressWarnings({ "unchecked", "rawtypes" }) + public TerminatingResults map(QueryResultConverter converter) { + return new ReactiveRemoveSupport<>(template, (Class) domainType, query, collection, converter); } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java index 253ae13cdf..aeb0e88e24 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,7 +32,7 @@ public interface ReactiveSessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link ReactiveMongoOperations}. The session is * inferred directly into the operation so that no further interaction is necessary. - *

          + *
          * Please note that only Spring Data-specific abstractions like {@link ReactiveMongoOperations#find(Query, Class)} and * others are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway * objects like {@link com.mongodb.reactivestreams.client.MongoCollection} or diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java index 3e15682769..f0ffc1ba60 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveSessionScoped.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -33,7 +33,7 @@ public interface ReactiveSessionScoped { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

          + *
          * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -47,7 +47,7 @@ default Flux execute(ReactiveSessionCallback action) { /** * Executes the given {@link ReactiveSessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

          + *
          * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java index 75ed0af74c..c9f92029cc 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,10 +15,14 @@ */ package org.springframework.data.mongodb.core; +import org.jetbrains.annotations.Contract; import reactor.core.publisher.Mono; +import org.springframework.data.mongodb.core.aggregation.AggregationUpdate; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.Update; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import com.mongodb.client.result.UpdateResult; @@ -61,6 +65,18 @@ public interface ReactiveUpdateOperation { */ interface TerminatingFindAndModify { + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. 
+ * @return new instance of {@link TerminatingFindAndModify}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingFindAndModify map(QueryResultConverter converter); + /** * Find, modify and return the first matching document. * @@ -69,13 +85,42 @@ interface TerminatingFindAndModify { Mono findAndModify(); } + /** + * Trigger replaceOne + * execution by calling one of the terminating methods. + * + * @author Christoph Strobl + * @since 4.2 + */ + interface TerminatingReplace { + + /** + * Find first and replace/upsert. + * + * @return never {@literal null}. + */ + Mono replaceFirst(); + } + /** * Compose findAndReplace execution by calling one of the terminating methods. * * @author Mark Paluch * @since 2.1 */ - interface TerminatingFindAndReplace { + interface TerminatingFindAndReplace extends TerminatingReplace { + + /** + * Map the query result to a different type using {@link QueryResultConverter}. + * + * @param {@link Class type} of the result. + * @param converter the converter, must not be {@literal null}. + * @return new instance of {@link TerminatingFindAndModify}. + * @throws IllegalArgumentException if {@link QueryResultConverter converter} is {@literal null}. + * @since 5.0 + */ + @Contract("_ -> new") + TerminatingFindAndReplace map(QueryResultConverter converter); /** * Find, replace and return the first matching document. @@ -118,13 +163,16 @@ interface TerminatingUpdate extends TerminatingFindAndModify, FindAndModif interface UpdateWithUpdate { /** - * Set the {@link org.springframework.data.mongodb.core.query.Update} to be applied. + * Set the {@link UpdateDefinition} to be applied. * * @param update must not be {@literal null}. * @return new instance of {@link TerminatingUpdate}. Never {@literal null}. * @throws IllegalArgumentException if update is {@literal null}. 
+ * @since 3.0 + * @see Update + * @see AggregationUpdate */ - TerminatingUpdate apply(org.springframework.data.mongodb.core.query.Update update); + TerminatingUpdate apply(UpdateDefinition update); /** * Specify {@code replacement} object. @@ -166,6 +214,18 @@ interface UpdateWithQuery extends UpdateWithUpdate { * @throws IllegalArgumentException if query is {@literal null}. */ UpdateWithUpdate matching(Query query); + + /** + * Set the filter {@link CriteriaDefinition criteria} to be used. + * + * @param criteria must not be {@literal null}. + * @return new instance of {@link UpdateWithUpdate}. + * @throws IllegalArgumentException if query is {@literal null}. + * @since 3.0 + */ + default UpdateWithUpdate matching(CriteriaDefinition criteria) { + return matching(Query.query(criteria)); + } } /** @@ -184,6 +244,22 @@ interface FindAndModifyWithOptions { TerminatingFindAndModify withOptions(FindAndModifyOptions options); } + /** + * @author Christoph Strobl + * @since 4.2 + */ + interface ReplaceWithOptions extends TerminatingReplace { + + /** + * Explicitly define {@link ReplaceOptions}. + * + * @param options must not be {@literal null}. + * @return new instance of {@link FindAndReplaceOptions}. + * @throws IllegalArgumentException if options is {@literal null}. + */ + TerminatingReplace withOptions(ReplaceOptions options); + } + /** * Define {@link FindAndReplaceOptions}. * @@ -191,7 +267,7 @@ interface FindAndModifyWithOptions { * @author Christoph Strobl * @since 2.1 */ - interface FindAndReplaceWithOptions extends TerminatingFindAndReplace { + interface FindAndReplaceWithOptions extends TerminatingFindAndReplace, ReplaceWithOptions { /** * Explicitly define {@link FindAndReplaceOptions} for the {@link Update}. 
diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java index 82ebcdce76..876a7a5aa2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReactiveUpdateOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,14 +15,11 @@ */ package org.springframework.data.mongodb.core; -import lombok.AccessLevel; -import lombok.NonNull; -import lombok.RequiredArgsConstructor; -import lombok.experimental.FieldDefaults; import reactor.core.publisher.Mono; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.core.query.UpdateDefinition; import org.springframework.util.Assert; import org.springframework.util.StringUtils; @@ -35,188 +32,192 @@ * @author Christoph Strobl * @since 2.0 */ -@RequiredArgsConstructor class ReactiveUpdateOperationSupport implements ReactiveUpdateOperation { private static final Query ALL_QUERY = new Query(); - private final @NonNull ReactiveMongoTemplate template; + private final ReactiveMongoTemplate template; + + ReactiveUpdateOperationSupport(ReactiveMongoTemplate template) { + this.template = 
template; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation#update(java.lang.Class) - */ @Override public ReactiveUpdate update(Class domainType) { - Assert.notNull(domainType, "DomainType must not be null!"); + Assert.notNull(domainType, "DomainType must not be null"); - return new ReactiveUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType); + return new ReactiveUpdateSupport<>(template, domainType, ALL_QUERY, null, null, null, null, null, domainType, QueryResultConverter.entity()); } - @RequiredArgsConstructor - @FieldDefaults(level = AccessLevel.PRIVATE, makeFinal = true) - static class ReactiveUpdateSupport + static class ReactiveUpdateSupport implements ReactiveUpdate, UpdateWithCollection, UpdateWithQuery, TerminatingUpdate, FindAndReplaceWithOptions, FindAndReplaceWithProjection, TerminatingFindAndReplace { - @NonNull ReactiveMongoTemplate template; - @NonNull Class domainType; - Query query; - org.springframework.data.mongodb.core.query.Update update; - @Nullable String collection; - @Nullable FindAndModifyOptions findAndModifyOptions; - @Nullable FindAndReplaceOptions findAndReplaceOptions; - @Nullable Object replacement; - @NonNull Class targetType; - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#apply(org.springframework.data.mongodb.core.query.Update) - */ + private final ReactiveMongoTemplate template; + private final Class domainType; + private final Query query; + private final org.springframework.data.mongodb.core.query.@Nullable UpdateDefinition update; + private final @Nullable String collection; + private final @Nullable FindAndModifyOptions findAndModifyOptions; + private final @Nullable FindAndReplaceOptions findAndReplaceOptions; + private final @Nullable Object replacement; + private final Class targetType; + private final QueryResultConverter resultConverter; + + 
ReactiveUpdateSupport(ReactiveMongoTemplate template, Class domainType, Query query, @Nullable UpdateDefinition update, + @Nullable String collection, @Nullable FindAndModifyOptions findAndModifyOptions, @Nullable FindAndReplaceOptions findAndReplaceOptions, + @Nullable Object replacement, Class targetType, QueryResultConverter resultConverter) { + + this.template = template; + this.domainType = domainType; + this.query = query; + this.update = update; + this.collection = collection; + this.findAndModifyOptions = findAndModifyOptions; + this.findAndReplaceOptions = findAndReplaceOptions; + this.replacement = replacement; + this.targetType = targetType; + this.resultConverter = resultConverter; + } + @Override - public TerminatingUpdate apply(org.springframework.data.mongodb.core.query.Update update) { + public TerminatingUpdate apply(org.springframework.data.mongodb.core.query.UpdateDefinition update) { - Assert.notNull(update, "Update must not be null!"); + Assert.notNull(update, "Update must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithCollection#inCollection(java.lang.String) - */ @Override public UpdateWithQuery inCollection(String collection) { - Assert.hasText(collection, "Collection must not be null nor empty!"); + Assert.hasText(collection, "Collection must not be null nor empty"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#first() - */ @Override public Mono first() { 
return doUpdate(false, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#upsert() - */ @Override public Mono upsert() { return doUpdate(true, true); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingFindAndModify#findAndModify() - */ @Override + @SuppressWarnings({"unchecked", "rawtypes", "NullAway"}) public Mono findAndModify() { String collectionName = getCollectionName(); - return template.findAndModify(query, update, findAndModifyOptions, targetType, collectionName); + return template.findAndModify(query, update, + findAndModifyOptions != null ? findAndModifyOptions : FindAndModifyOptions.none(), (Class) targetType, + collectionName, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingFindAndReplace#findAndReplace() - */ @Override + @SuppressWarnings({"unchecked","rawtypes"}) public Mono findAndReplace() { + + Assert.notNull(replacement, "Replacement must be set first"); + return template.findAndReplace(query, replacement, - findAndReplaceOptions != null ? findAndReplaceOptions : new FindAndReplaceOptions(), (Class) domainType, - getCollectionName(), targetType); + findAndReplaceOptions != null ? 
findAndReplaceOptions : FindAndReplaceOptions.none(), (Class) domainType, + getCollectionName(), targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithQuery#matching(org.springframework.data.mongodb.core.Query) - */ @Override public UpdateWithUpdate matching(Query query) { - Assert.notNull(query, "Query must not be null!"); + Assert.notNull(query, "Query must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.TerminatingUpdate#all() - */ @Override public Mono all() { return doUpdate(true, false); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndModifyWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndModifyOptions) - */ @Override public TerminatingFindAndModify withOptions(FindAndModifyOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, options, - findAndReplaceOptions, replacement, targetType); + findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.UpdateWithUpdate#replaceWith(java.lang.Object) - */ @Override public FindAndReplaceWithProjection replaceWith(T replacement) { - Assert.notNull(replacement, "Replacement must not be null!"); + Assert.notNull(replacement, "Replacement must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, targetType); + 
findAndReplaceOptions, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithOptions#withOptions(org.springframework.data.mongodb.core.FindAndReplaceOptions) - */ @Override public FindAndReplaceWithProjection withOptions(FindAndReplaceOptions options) { - Assert.notNull(options, "Options must not be null!"); + Assert.notNull(options, "Options must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, options, - replacement, targetType); + replacement, targetType, resultConverter); + } + + @Override + public TerminatingReplace withOptions(ReplaceOptions options) { + + FindAndReplaceOptions target = new FindAndReplaceOptions(); + if (options.isUpsert()) { + target.upsert(); + } + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + target, replacement, targetType, resultConverter); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.ReactiveUpdateOperation.FindAndReplaceWithProjection#as(java.lang.Class) - */ @Override public FindAndReplaceWithOptions as(Class resultType) { - Assert.notNull(resultType, "ResultType must not be null!"); + Assert.notNull(resultType, "ResultType must not be null"); return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, - findAndReplaceOptions, replacement, resultType); + findAndReplaceOptions, replacement, resultType, QueryResultConverter.entity()); + } + + @Override + public ReactiveUpdateSupport map(QueryResultConverter converter) { + return new ReactiveUpdateSupport<>(template, domainType, query, update, collection, findAndModifyOptions, + findAndReplaceOptions, replacement, targetType, this.resultConverter.andThen(converter)); + } + + @Override + @SuppressWarnings("NullAway") + public Mono replaceFirst() { + + if (replacement != null) { + 
return template.replace(query, domainType, replacement, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); + } + + return template.replace(query, domainType, update, + findAndReplaceOptions != null ? findAndReplaceOptions : ReplaceOptions.none(), getCollectionName()); } + @SuppressWarnings("NullAway") private Mono doUpdate(boolean multi, boolean upsert) { return template.doUpdate(getCollectionName(), query, update, domainType, upsert, multi); } private String getCollectionName() { - return StringUtils.hasText(collection) ? collection : template.determineCollectionName(domainType); + return StringUtils.hasText(collection) ? collection : template.getCollectionName(domainType); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java new file mode 100644 index 0000000000..7a7e5fdfb2 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadConcernAware.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.jspecify.annotations.Nullable; + +import com.mongodb.ReadConcern; + +/** + * Interface to be implemented by any object that wishes to expose the {@link ReadConcern}. + *

          + * Typically implemented by cursor or query preparer objects. + * + * @author Mark Paluch + * @since 4.1 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions + */ +public interface ReadConcernAware { + + /** + * @return {@literal true} if a {@link ReadConcern} is set. + */ + default boolean hasReadConcern() { + return getReadConcern() != null; + } + + /** + * @return the {@link ReadConcern} to apply or {@literal null} if none set. + */ + @Nullable + ReadConcern getReadConcern(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java new file mode 100644 index 0000000000..e6f3fc0daf --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReadPreferenceAware.java @@ -0,0 +1,47 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.jspecify.annotations.Nullable; + +import com.mongodb.ReadPreference; + +/** + * Interface to be implemented by any object that wishes to expose the {@link ReadPreference}. + *

          + * Typically implemented by cursor or query preparer objects. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 2.2 + * @see org.springframework.data.mongodb.core.query.Query + * @see org.springframework.data.mongodb.core.aggregation.AggregationOptions + */ +public interface ReadPreferenceAware { + + /** + * @return {@literal true} if a {@link ReadPreference} is set. + */ + default boolean hasReadPreference() { + return getReadPreference() != null; + } + + /** + * @return the {@link ReadPreference} to apply or {@literal null} if none set. + */ + @Nullable + ReadPreference getReadPreference(); +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java new file mode 100644 index 0000000000..a487cde669 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ReplaceOptions.java @@ -0,0 +1,89 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.lang.Contract; + +/** + * Options for {@link org.springframework.data.mongodb.core.MongoOperations#replace(Query, Object) replace operations}. Defaults to + *

          + *
          upsert
          + *
          false
          + *
+ * + * @author Jakub Zurawa + * @author Christoph Strobl + * @since 4.2 + */ +public class ReplaceOptions { + + private boolean upsert; + + private static final ReplaceOptions NONE = new ReplaceOptions() { + + private static final String ERROR_MSG = "ReplaceOptions.none() cannot be changed; Please use ReplaceOptions.options() instead"; + + @Override + public ReplaceOptions upsert() { + throw new UnsupportedOperationException(ERROR_MSG); + } + }; + + /** + * Static factory method to create a {@link ReplaceOptions} instance. + *
          + *
          upsert
          + *
          false
          + *
          + * + * @return new instance of {@link ReplaceOptions}. + */ + public static ReplaceOptions replaceOptions() { + return new ReplaceOptions(); + } + + /** + * Static factory method returning an unmodifiable {@link ReplaceOptions} instance. + * + * @return unmodifiable {@link ReplaceOptions} instance. + */ + public static ReplaceOptions none() { + return NONE; + } + + /** + * Insert a new document if not exists. + * + * @return this. + */ + @Contract("-> this") + public ReplaceOptions upsert() { + + this.upsert = true; + return this; + } + + /** + * Get the bit indicating if to create a new document if not exists. + * + * @return {@literal true} if set. + */ + public boolean isUpsert() { + return upsert; + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java index d847eda669..2ec71b415a 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScriptOperations.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,23 +17,24 @@ import java.util.Set; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.script.ExecutableMongoScript; import org.springframework.data.mongodb.core.script.NamedMongoScript; -import org.springframework.lang.Nullable; -import com.mongodb.DB; /** - * Script operations on {@link com.mongodb.DB} level. Allows interaction with server side JavaScript functions. + * Script operations on {@link com.mongodb.client.MongoDatabase} level. Allows interaction with server side JavaScript functions. * * @author Christoph Strobl * @author Oliver Gierke * @since 1.7 + * @deprecated since 2.2. The {@code eval} command has been removed without replacement in MongoDB Server 4.2.0. */ +@Deprecated public interface ScriptOperations { /** - * Store given {@link ExecutableMongoScript} generating a syntheitcal name so that it can be called by it + * Store given {@link ExecutableMongoScript} generating a synthetic name so that it can be called by it * subsequently. * * @param script must not be {@literal null}. @@ -71,10 +72,10 @@ public interface ScriptOperations { Object call(String scriptName, Object... args); /** - * Checks {@link DB} for existence of {@link ServerSideJavaScript} with given name. + * Checks {@link com.mongodb.client.MongoDatabase} for existence of {@literal ServerSideJavaScript} with given name. * * @param scriptName must not be {@literal null} or empty. - * @return false if no {@link ServerSideJavaScript} with given name exists. + * @return false if no {@literal ServerSideJavaScript} with given name exists. 
*/ boolean exists(String scriptName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java new file mode 100644 index 0000000000..62e6d6c513 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ScrollUtils.java @@ -0,0 +1,276 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.function.IntFunction; + +import org.bson.BsonNull; +import org.bson.Document; +import org.springframework.data.domain.KeysetScrollPosition; +import org.springframework.data.domain.ScrollPosition; +import org.springframework.data.domain.ScrollPosition.Direction; +import org.springframework.data.domain.Window; +import org.springframework.data.mongodb.core.EntityOperations.Entity; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.util.Assert; + +/** + * Utilities to run scroll queries and create {@link Window} results. + * + * @author Mark Paluch + * @author Christoph Strobl + * @since 4.1 + */ +class ScrollUtils { + + /** + * Create the actual query to run keyset-based pagination. Affects projection, sorting, and the criteria. 
+ * + * @param query + * @param idPropertyName + * @return + */ + static KeysetScrollQuery createKeysetPaginationQuery(Query query, String idPropertyName) { + + + KeysetScrollPosition keyset = query.getKeyset(); + + Assert.notNull(keyset, "Query.keyset must not be null"); + + KeysetScrollDirector director = KeysetScrollDirector.of(keyset.getDirection()); + Document sortObject = director.getSortObject(idPropertyName, query); + Document fieldsObject = director.getFieldsObject(query.getFieldsObject(), sortObject); + Document queryObject = director.createQuery(keyset, query.getQueryObject(), sortObject); + + return new KeysetScrollQuery(queryObject, fieldsObject, sortObject); + } + + static Window createWindow(Query query, List result, Class sourceType, EntityOperations operations) { + + Document sortObject = query.getSortObject(); + KeysetScrollPosition keyset = query.getKeyset(); + + Assert.notNull(keyset, "Query.keyset must not be null"); + + Direction direction = keyset.getDirection(); + KeysetScrollDirector director = KeysetScrollDirector.of(direction); + + List resultsToUse = director.postPostProcessResults(result, query.getLimit()); + + IntFunction positionFunction = value -> { + + T last = resultsToUse.get(value); + Entity entity = operations.forEntity(last); + + Map keys = entity.extractKeys(sortObject, sourceType); + return ScrollPosition.of(keys, direction); + }; + + return Window.from(resultsToUse, positionFunction, hasMoreElements(result, query.getLimit())); + } + + static Window createWindow(List result, int limit, IntFunction positionFunction) { + return Window.from(getSubList(result, limit), positionFunction, hasMoreElements(result, limit)); + } + + static boolean hasMoreElements(List result, int limit) { + return !result.isEmpty() && result.size() > limit; + } + + static List getSubList(List result, int limit) { + + if (limit > 0 && result.size() > limit) { + return result.subList(0, limit); + } + + return result; + } + + record 
KeysetScrollQuery(Document query, Document fields, Document sort) { + + } + + /** + * Director for keyset scrolling. + */ + static class KeysetScrollDirector { + + private static final KeysetScrollDirector FORWARD = new KeysetScrollDirector(); + private static final KeysetScrollDirector REVERSE = new ReverseKeysetScrollDirector(); + + /** + * Factory method to obtain the right {@link KeysetScrollDirector}. + * + * @param direction + * @return + */ + public static KeysetScrollDirector of(ScrollPosition.Direction direction) { + return direction == Direction.FORWARD ? FORWARD : REVERSE; + } + + public Document getSortObject(String idPropertyName, Query query) { + + Document sortObject = query.isSorted() ? query.getSortObject() : new Document(); + sortObject.put(idPropertyName, 1); + + return sortObject; + } + + public Document getFieldsObject(Document fieldsObject, Document sortObject) { + + // make sure we can extract the keyset + if (!fieldsObject.isEmpty()) { + for (String field : sortObject.keySet()) { + fieldsObject.put(field, 1); + } + } + + return fieldsObject; + } + + public Document createQuery(KeysetScrollPosition keyset, Document queryObject, Document sortObject) { + + Map keysetValues = keyset.getKeys(); + List or = (List) queryObject.getOrDefault("$or", new ArrayList<>()); + List sortKeys = new ArrayList<>(sortObject.keySet()); + + // first query doesn't come with a keyset + if (keysetValues.isEmpty()) { + return queryObject; + } + + if (!keysetValues.keySet().containsAll(sortKeys)) { + throw new IllegalStateException("KeysetScrollPosition does not contain all keyset values"); + } + + // build matrix query for keyset paging that contains sort^2 queries + // reflecting a query that follows sort order semantics starting from the last returned keyset + for (int i = 0; i < sortKeys.size(); i++) { + + Document sortConstraint = new Document(); + + for (int j = 0; j < sortKeys.size(); j++) { + + String sortSegment = sortKeys.get(j); + int sortOrder = 
sortObject.getInteger(sortSegment); + Object o = keysetValues.get(sortSegment); + + if (j >= i) { // tail segment + if (o instanceof BsonNull) { + throw new IllegalStateException( + "Cannot resume from KeysetScrollPosition. Offending key: '%s' is 'null'".formatted(sortSegment)); + } + sortConstraint.put(sortSegment, new Document(getComparator(sortOrder), o)); + break; + } + + sortConstraint.put(sortSegment, o); + } + + if (!sortConstraint.isEmpty()) { + or.add(sortConstraint); + } + } + + if (!or.isEmpty()) { + queryObject.put("$or", or); + } + + return queryObject; + } + + protected String getComparator(int sortOrder) { + return sortOrder == 1 ? "$gt" : "$lt"; + } + + protected List postPostProcessResults(List list, int limit) { + return getFirst(limit, list); + } + + } + + /** + * Reverse scrolling director variant applying {@link KeysetScrollPosition.Direction#BACKWARD}. In reverse scrolling, + * we need to flip directions for the actual query so that we do not get everything from the top position and apply + * the limit but rather flip the sort direction, apply the limit and then reverse the result to restore the actual + * sort order. + */ + private static class ReverseKeysetScrollDirector extends KeysetScrollDirector { + + @Override + public Document getSortObject(String idPropertyName, Query query) { + + Document sortObject = super.getSortObject(idPropertyName, query); + + // flip sort direction for backward scrolling + + for (String field : sortObject.keySet()) { + sortObject.put(field, sortObject.getInteger(field) == 1 ? -1 : 1); + } + + return sortObject; + } + + @Override + public List postPostProcessResults(List list, int limit) { + + // flip direction of the result list as we need to accommodate for the flipped sort order for proper offset + // querying. + Collections.reverse(list); + + return getLast(limit, list); + } + + } + + /** + * Return the first {@code count} items from the list.
+ * + * @param count + * @param list + * @return + * @param + */ + static List getFirst(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(0, count); + } + + return list; + } + + /** + * Return the last {@code count} items from the list. + * + * @param count + * @param list + * @return + * @param + */ + static List getLast(int count, List list) { + + if (count > 0 && list.size() > count) { + return list.subList(list.size() - count, list.size()); + } + + return list; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java index 2f389219d2..76a6d525f8 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionCallback.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,8 +15,8 @@ */ package org.springframework.data.mongodb.core; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.query.Query; -import org.springframework.lang.Nullable; /** * Callback interface for executing operations within a {@link com.mongodb.session.ClientSession}. @@ -31,7 +31,7 @@ public interface SessionCallback { /** * Execute operations against a MongoDB instance via session bound {@link MongoOperations}. 
The session is inferred * directly into the operation so that no further interaction is necessary. - *

          + *
          * Please note that only Spring Data-specific abstractions like {@link MongoOperations#find(Query, Class)} and others * are enhanced with the {@link com.mongodb.session.ClientSession}. When obtaining plain MongoDB gateway objects like * {@link com.mongodb.client.MongoCollection} or {@link com.mongodb.client.MongoDatabase} via eg. diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java index 0a1c218ca2..906d682685 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SessionScoped.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,13 +17,13 @@ import java.util.function.Consumer; -import org.springframework.lang.Nullable; - import com.mongodb.client.ClientSession; +import org.jspecify.annotations.Nullable; + /** * Gateway interface to execute {@link ClientSession} bound operations against MongoDB via a {@link SessionCallback}. - *

          + *
          * The very same bound {@link ClientSession} is used for all invocations of {@code execute} on the instance. * * @author Christoph Strobl @@ -34,7 +34,7 @@ public interface SessionScoped { /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

          + *
          * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -42,14 +42,13 @@ public interface SessionScoped { * @param return type. * @return a result object returned by the action. Can be {@literal null}. */ - @Nullable - default T execute(SessionCallback action) { + default @Nullable T execute(SessionCallback action) { return execute(action, session -> {}); } /** * Executes the given {@link SessionCallback} within the {@link com.mongodb.session.ClientSession}. - *

          + *
          * It is up to the caller to make sure the {@link com.mongodb.session.ClientSession} is {@link ClientSession#close() * closed} when done. * @@ -60,6 +59,5 @@ default T execute(SessionCallback action) { * @param return type. * @return a result object returned by the action. Can be {@literal null}. */ - @Nullable - T execute(SessionCallback action, Consumer doFinally); + @Nullable T execute(SessionCallback action, Consumer doFinally); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java similarity index 56% rename from spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDbFactory.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java index bec369b90a..2b51b5e077 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDbFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoClientDatabaseFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2018 the original author or authors. + * Copyright 2018-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,7 +19,6 @@ import com.mongodb.ClientSessionOptions; import com.mongodb.ConnectionString; -import com.mongodb.DB; import com.mongodb.client.ClientSession; import com.mongodb.client.MongoClient; import com.mongodb.client.MongoClients; @@ -29,86 +28,63 @@ * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance. * * @author Christoph Strobl - * @since 2.1 + * @since 3.0 */ -public class SimpleMongoClientDbFactory extends MongoDbFactorySupport implements DisposableBean { +public class SimpleMongoClientDatabaseFactory extends MongoDatabaseFactorySupport + implements DisposableBean { /** - * Creates a new {@link SimpleMongoClientDbFactory} instance for the given {@code connectionString}. + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance for the given {@code connectionString}. * * @param connectionString connection coordinates for a database connection. Must contain a database name and must not * be {@literal null} or empty. * @see MongoDB Connection String reference */ - public SimpleMongoClientDbFactory(String connectionString) { + public SimpleMongoClientDatabaseFactory(String connectionString) { this(new ConnectionString(connectionString)); } /** - * Creates a new {@link SimpleMongoClientDbFactory} instance from the given {@link MongoClient}. + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}. * * @param connectionString connection coordinates for a database connection. Must contain also a database name and not * be {@literal null}. 
*/ - public SimpleMongoClientDbFactory(ConnectionString connectionString) { + public SimpleMongoClientDatabaseFactory(ConnectionString connectionString) { this(MongoClients.create(connectionString), connectionString.getDatabase(), true); } /** - * Creates a new {@link SimpleMongoClientDbFactory} instance from the given {@link MongoClient}. + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}. * * @param mongoClient must not be {@literal null}. * @param databaseName must not be {@literal null} or empty. */ - public SimpleMongoClientDbFactory(MongoClient mongoClient, String databaseName) { + public SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName) { this(mongoClient, databaseName, false); } /** - * Creates a new {@link SimpleMongoClientDbFactory} instance from the given {@link MongoClient}. + * Creates a new {@link SimpleMongoClientDatabaseFactory} instance from the given {@link MongoClient}. * * @param mongoClient must not be {@literal null}. * @param databaseName must not be {@literal null} or empty. * @param mongoInstanceCreated */ - private SimpleMongoClientDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { - super(mongoClient, databaseName, mongoInstanceCreated, new MongoExceptionTranslator()); + SimpleMongoClientDatabaseFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { + super(mongoClient, databaseName, mongoInstanceCreated, MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb() - */ - @Override - public DB getLegacyDb() { - - throw new UnsupportedOperationException(String.format( - "%s does not support legacy DBObject API! 
Please consider using SimpleMongoDbFactory for that purpose.", - MongoClient.class)); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions) - */ @Override public ClientSession getSession(ClientSessionOptions options) { return getMongoClient().startSession(options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoDbFactoryBase#closeClient() - */ @Override protected void closeClient() { getMongoClient().close(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoDbFactoryBase#doGetMongoDatabase(java.lang.String) - */ @Override protected MongoDatabase doGetMongoDatabase(String dbName) { return getMongoClient().getDatabase(dbName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java deleted file mode 100644 index 2d1d6f2ec6..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleMongoDbFactory.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright 2011-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.springframework.data.mongodb.core; - -import org.springframework.beans.factory.DisposableBean; - -import com.mongodb.ClientSessionOptions; -import com.mongodb.DB; -import com.mongodb.MongoClient; -import com.mongodb.MongoClientURI; -import com.mongodb.WriteConcern; -import com.mongodb.client.ClientSession; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.MongoDatabase; - -/** - * Factory to create {@link MongoDatabase} instances from a {@link MongoClient} instance. - * - * @author Mark Pollack - * @author Oliver Gierke - * @author Thomas Darimont - * @author Christoph Strobl - * @author George Moraitis - * @author Mark Paluch - */ -public class SimpleMongoDbFactory extends MongoDbFactorySupport implements DisposableBean { - - /** - * Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClientURI}. - * - * @param uri coordinates for a database connection. Must contain a database name and must not be {@literal null}. - * @since 1.7 - */ - public SimpleMongoDbFactory(MongoClientURI uri) { - this(new MongoClient(uri), uri.getDatabase(), true); - } - - /** - * Creates a new {@link SimpleMongoDbFactory} instance from the given {@link MongoClient}. - * - * @param mongoClient must not be {@literal null}. - * @param databaseName must not be {@literal null} or empty. 
- * @since 1.7 - */ - public SimpleMongoDbFactory(MongoClient mongoClient, String databaseName) { - this(mongoClient, databaseName, false); - } - - /** - * @param mongoClient - * @param databaseName - * @param mongoInstanceCreated - * @since 1.7 - */ - private SimpleMongoDbFactory(MongoClient mongoClient, String databaseName, boolean mongoInstanceCreated) { - super(mongoClient, databaseName, mongoInstanceCreated, new MongoExceptionTranslator()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getLegacyDb() - */ - @Override - public DB getLegacyDb() { - return getMongoClient().getDB(getDefaultDatabaseName()); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.MongoDbFactory#getSession(com.mongodb.ClientSessionOptions) - */ - @Override - public ClientSession getSession(ClientSessionOptions options) { - return getMongoClient().startSession(options); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoDbFactoryBase#closeClient() - */ - @Override - protected void closeClient() { - getMongoClient().close(); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.MongoDbFactoryBase#doGetMongoDatabase(java.lang.String) - */ - @Override - protected MongoDatabase doGetMongoDatabase(String dbName) { - return getMongoClient().getDatabase(dbName); - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java index 71d73a442a..529f912e6c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SimpleReactiveMongoDatabaseFactory.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. 
+ * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,18 @@ */ package org.springframework.data.mongodb.core; -import lombok.Value; import reactor.core.publisher.Mono; +import org.bson.codecs.configuration.CodecRegistry; +import org.jspecify.annotations.Nullable; import org.springframework.aop.framework.ProxyFactory; import org.springframework.beans.factory.DisposableBean; import org.springframework.dao.DataAccessException; import org.springframework.dao.support.PersistenceExceptionTranslator; import org.springframework.data.mongodb.ReactiveMongoDatabaseFactory; import org.springframework.data.mongodb.SessionAwareMethodInterceptor; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; import com.mongodb.ClientSessionOptions; import com.mongodb.ConnectionString; @@ -41,6 +42,7 @@ * * @author Mark Paluch * @author Christoph Strobl + * @author Mathieu Ouellet * @since 2.0 */ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, ReactiveMongoDatabaseFactory { @@ -49,8 +51,7 @@ public class SimpleReactiveMongoDatabaseFactory implements DisposableBean, React private final String databaseName; private final boolean mongoInstanceCreated; - private final PersistenceExceptionTranslator exceptionTranslator; - + private PersistenceExceptionTranslator exceptionTranslator = MongoExceptionTranslator.DEFAULT_EXCEPTION_TRANSLATOR; private @Nullable WriteConcern writeConcern; /** @@ -75,15 +76,29 @@ public SimpleReactiveMongoDatabaseFactory(MongoClient mongoClient, 
String databa private SimpleReactiveMongoDatabaseFactory(MongoClient client, String databaseName, boolean mongoInstanceCreated) { - Assert.notNull(client, "MongoClient must not be null!"); - Assert.hasText(databaseName, "Database name must not be empty!"); + Assert.notNull(client, "MongoClient must not be null"); + Assert.hasText(databaseName, "Database name must not be empty"); Assert.isTrue(databaseName.matches("[^/\\\\.$\"\\s]+"), - "Database name must not contain slashes, dots, spaces, quotes, or dollar signs!"); + "Database name must not contain slashes, dots, spaces, quotes, or dollar signs"); this.mongo = client; this.databaseName = databaseName; this.mongoInstanceCreated = mongoInstanceCreated; - this.exceptionTranslator = new MongoExceptionTranslator(); + } + + /** + * Configures the {@link PersistenceExceptionTranslator} to be used. + * + * @param exceptionTranslator the exception translator to set. + * @since 4.4 + */ + public void setExceptionTranslator(PersistenceExceptionTranslator exceptionTranslator) { + this.exceptionTranslator = exceptionTranslator; + } + + @Override + public PersistenceExceptionTranslator getExceptionTranslator() { + return this.exceptionTranslator; } /** @@ -95,24 +110,22 @@ public void setWriteConcern(WriteConcern writeConcern) { this.writeConcern = writeConcern; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase() - */ - public MongoDatabase getMongoDatabase() throws DataAccessException { + @Override + public Mono getMongoDatabase() throws DataAccessException { return getMongoDatabase(databaseName); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getMongoDatabase(java.lang.String) - */ - public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { + @Override + public Mono getMongoDatabase(String dbName) throws DataAccessException { - Assert.hasText(dbName, "Database name must not be empty."); + 
Assert.hasText(dbName, "Database name must not be empty"); - MongoDatabase db = mongo.getDatabase(dbName); - return writeConcern != null ? db.withWriteConcern(writeConcern) : db; + return Mono.fromSupplier(() -> { + + MongoDatabase db = mongo.getDatabase(dbName); + + return writeConcern != null ? db.withWriteConcern(writeConcern) : db; + }); } /** @@ -120,6 +133,7 @@ public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException * * @see DisposableBean#destroy() */ + @Override public void destroy() throws Exception { if (mongoInstanceCreated) { @@ -127,27 +141,16 @@ public void destroy() throws Exception { } } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getExceptionTranslator() - */ - public PersistenceExceptionTranslator getExceptionTranslator() { - return this.exceptionTranslator; + @Override + public CodecRegistry getCodecRegistry() { + return this.mongo.getDatabase(databaseName).getCodecRegistry(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#getSession(com.mongodb.ClientSessionOptions) - */ @Override public Mono getSession(ClientSessionOptions options) { return Mono.from(mongo.startSession(options)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDbFactory#withSession(com.mongodb.session.ClientSession) - */ @Override public ReactiveMongoDatabaseFactory withSession(ClientSession session) { return new ClientSessionBoundMongoDbFactory(session, this); @@ -160,57 +163,52 @@ public ReactiveMongoDatabaseFactory withSession(ClientSession session) { * @author Christoph Strobl * @since 2.1 */ - @Value - static class ClientSessionBoundMongoDbFactory implements ReactiveMongoDatabaseFactory { + static final class ClientSessionBoundMongoDbFactory implements ReactiveMongoDatabaseFactory { + + private final ClientSession session; + private final ReactiveMongoDatabaseFactory delegate; + + ClientSessionBoundMongoDbFactory(ClientSession 
session, ReactiveMongoDatabaseFactory delegate) { - ClientSession session; - ReactiveMongoDatabaseFactory delegate; + this.session = session; + this.delegate = delegate; + } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase() - */ @Override - public MongoDatabase getMongoDatabase() throws DataAccessException { - return decorateDatabase(delegate.getMongoDatabase()); + public Mono getMongoDatabase() throws DataAccessException { + return delegate.getMongoDatabase().map(this::decorateDatabase); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getMongoDatabase(java.lang.String) - */ @Override - public MongoDatabase getMongoDatabase(String dbName) throws DataAccessException { - return decorateDatabase(delegate.getMongoDatabase(dbName)); + public Mono getMongoDatabase(String dbName) throws DataAccessException { + return delegate.getMongoDatabase(dbName).map(this::decorateDatabase); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getExceptionTranslator() - */ @Override public PersistenceExceptionTranslator getExceptionTranslator() { return delegate.getExceptionTranslator(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#getSession(com.mongodb.ClientSessionOptions) - */ + @Override + public CodecRegistry getCodecRegistry() { + return delegate.getCodecRegistry(); + } + @Override public Mono getSession(ClientSessionOptions options) { return delegate.getSession(options); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.ReactiveMongoDatabaseFactory#withSession(com.mongodb.session.ClientSession) - */ @Override public ReactiveMongoDatabaseFactory withSession(ClientSession session) { return delegate.withSession(session); } + @Override + public boolean isTransactionActive() { + return session != null && session.hasActiveTransaction(); + } + private MongoDatabase 
decorateDatabase(MongoDatabase database) { return createProxyInstance(session, database, MongoDatabase.class); } @@ -233,7 +231,42 @@ private T createProxyInstance(com.mongodb.session.ClientSession session, T t factory.addAdvice(new SessionAwareMethodInterceptor<>(session, target, ClientSession.class, MongoDatabase.class, this::proxyDatabase, MongoCollection.class, this::proxyCollection)); - return targetType.cast(factory.getProxy()); + return targetType.cast(factory.getProxy(target.getClass().getClassLoader())); + } + + public ClientSession getSession() { + return this.session; + } + + public ReactiveMongoDatabaseFactory getDelegate() { + return this.delegate; + } + + @Override + public boolean equals(@Nullable Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + + ClientSessionBoundMongoDbFactory that = (ClientSessionBoundMongoDbFactory) o; + + if (!ObjectUtils.nullSafeEquals(this.session, that.session)) { + return false; + } + return ObjectUtils.nullSafeEquals(this.delegate, that.delegate); + } + + @Override + public int hashCode() { + int result = ObjectUtils.nullSafeHashCode(this.session); + result = 31 * result + ObjectUtils.nullSafeHashCode(this.delegate); + return result; + } + + public String toString() { + return "SimpleReactiveMongoDatabaseFactory.ClientSessionBoundMongoDbFactory(session=" + this.getSession() + + ", delegate=" + this.getDelegate() + ")"; } } } diff --git a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java similarity index 51% rename from spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java rename to spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java index 8e4a17fe5a..1652dca259 100644 --- 
a/spring-data-mongodb/src/test/java/org/springframework/data/mongodb/core/JmxServer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/SortingQueryCursorPreparer.java @@ -1,11 +1,11 @@ /* - * Copyright 2002-2018 the original author or authors. + * Copyright 2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,22 +15,18 @@ */ package org.springframework.data.mongodb.core; -import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.bson.Document; +import org.jspecify.annotations.Nullable; /** - * Server application than can be run as an app or unit test. + * {@link CursorPreparer} that exposes its {@link Document sort document}. * - * @author Mark Pollack - * @author Oliver Gierke + * @author Christoph Strobl + * @since 4.4.3 */ -public class JmxServer { +interface SortingQueryCursorPreparer extends CursorPreparer { - public static void main(String[] args) { - new JmxServer().run(); - } + @Nullable + Document getSortObject(); - @SuppressWarnings("resource") - public void run() { - new ClassPathXmlApplicationContext(new String[] { "infrastructure.xml", "server-jmx.xml" }); - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java new file mode 100644 index 0000000000..b4b525fc97 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/ViewOptions.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022-2025 the original author or authors. 
+ * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import java.util.Optional; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.query.Collation; +import org.springframework.lang.Contract; + +/** + * Immutable object holding additional options to be applied when creating a MongoDB + * views. + * + * @author Christoph Strobl + * @since 4.0 + */ +public class ViewOptions { + + private final @Nullable Collation collation; + + static ViewOptions none() { + return new ViewOptions(); + } + + /** + * Creates new instance of {@link ViewOptions}. + */ + public ViewOptions() { + this(null); + } + + private ViewOptions(@Nullable Collation collation) { + this.collation = collation; + } + + /** + * Get the {@link Collation} to be set. + * + * @return {@link Optional#empty()} if not set. + */ + public Optional getCollation() { + return Optional.ofNullable(collation); + } + + /** + * @param collation the {@link Collation} to use for language-specific string comparison. + * @return new instance of {@link ViewOptions}. 
+ */ + @Contract("_ -> new") + public ViewOptions collation(Collation collation) { + return new ViewOptions(collation); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java new file mode 100644 index 0000000000..bdc7de6663 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernAware.java @@ -0,0 +1,42 @@ +/* + * Copyright 2024-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core; + +import org.jspecify.annotations.Nullable; + +import com.mongodb.WriteConcern; + +/** + * Interface indicating a component that contains and exposes an {@link WriteConcern}. + * + * @author Christoph Strobl + * @since 4.3 + */ +public interface WriteConcernAware { + + /** + * @return the {@link WriteConcern} to apply or {@literal null} if none set. + */ + @Nullable + WriteConcern getWriteConcern(); + + /** + * @return {@literal true} if a {@link com.mongodb.WriteConcern} is set. 
+ */ + default boolean hasWriteConcern() { + return getWriteConcern() != null; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java index 9bae719e58..a72c656e47 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteConcernResolver.java @@ -1,11 +1,11 @@ /* - * Copyright 2011-2018 the original author or authors. + * Copyright 2011-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,7 @@ */ package org.springframework.data.mongodb.core; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; import com.mongodb.WriteConcern; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java index 814cd8b3f9..fbefe4a075 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/WriteResultChecking.java @@ -1,11 +1,11 @@ /* - * Copyright 2012-2018 the original author or authors. + * Copyright 2012-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,8 +16,8 @@ package org.springframework.data.mongodb.core; /** - * Enum to represent how strict the check of {@link com.mongodb.WriteResult} shall be. It can either be skipped entirely - * (use {@link #NONE}) or cause an exception to be thrown {@link #EXCEPTION}. + * Enum to represent how strict the check of {@link com.mongodb.WriteConcernResult} shall be. It can either be skipped + * entirely (use {@link #NONE}) or cause an exception to be thrown {@link #EXCEPTION}. * * @author Thomas Risberg * @author Oliver Gierke diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java index ec0a8ff8fd..710b570ed7 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AbstractAggregationExpression.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,18 +17,28 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.bson.Document; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Order; +import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** + * Support class for {@link AggregationExpression} implementations. + * * @author Christoph Strobl * @author Matt Morrissette + * @author Mark Paluch * @since 1.10 */ abstract class AbstractAggregationExpression implements AggregationExpression { @@ -39,15 +49,11 @@ protected AbstractAggregationExpression(Object value) { this.value = value; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return toDocument(this.value, context); } - @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { return new Document(getMongoMethod(), unpack(value, context)); } @@ -64,12 +70,35 @@ protected static List asFields(String... 
fieldRefs) { @SuppressWarnings("unchecked") private Object unpack(Object value, AggregationOperationContext context) { - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } - if (value instanceof Field) { - return context.getReference((Field) value).toString(); + if (value instanceof Field field) { + return context.getReference(field).toString(); + } + + if (value instanceof Fields fields) { + + List mapped = new ArrayList<>(fields.size()); + + for (Field field : fields) { + mapped.add(unpack(field, context)); + } + + return mapped; + } + + if (value instanceof Sort sort) { + + Document sortDoc = new Document(); + for (Order order : sort) { + + // Check reference + FieldReference reference = context.getReference(order.getProperty()); + sortDoc.put(reference.getRaw(), order.isAscending() ? 1 : -1); + } + return sortDoc; } if (value instanceof List) { @@ -77,7 +106,9 @@ private Object unpack(Object value, AggregationOperationContext context) { List sourceList = (List) value; List mappedList = new ArrayList<>(sourceList.size()); - sourceList.stream().map((item) -> unpack(item, context)).forEach(mappedList::add); + for (Object o : sourceList) { + mappedList.add(unpack(o, context)); + } return mappedList; } @@ -92,45 +123,141 @@ private Object unpack(Object value, AggregationOperationContext context) { return targetDocument; } + if (value instanceof SystemVariable) { + return value.toString(); + } + return value; } - protected List append(Object value) { + @SuppressWarnings("unchecked") + protected List append(Object value, Expand expandList) { if (this.value instanceof List) { - List clone = new ArrayList((List) this.value); + List clone = new ArrayList<>((List) this.value); - if (value instanceof List) { - clone.addAll((List) value); + if (value instanceof Collection collection && 
Expand.EXPAND_VALUES.equals(expandList)) { + clone.addAll(collection); } else { clone.add(value); } + return clone; } return Arrays.asList(this.value, value); } - @SuppressWarnings("unchecked") - protected java.util.Map append(String key, Object value) { + /** + * Expand a nested list of values to single entries or keep the list. + */ + protected enum Expand { + EXPAND_VALUES, KEEP_SOURCE + } - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + protected List append(Object value) { + return append(value, Expand.EXPAND_VALUES); + } + + @SuppressWarnings({ "unchecked" }) + protected Map append(String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); - java.util.Map clone = new LinkedHashMap<>((java.util.Map) this.value); + return append((Map) this.value, key, value); + } + + private Map append(Map existing, String key, Object value) { + + Map clone = new LinkedHashMap<>(existing); clone.put(key, value); return clone; + } + + @SuppressWarnings("rawtypes") + protected Map appendTo(String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + if (this.value instanceof Map map) { + + Map target = new HashMap<>(map); + if (!target.containsKey(key)) { + target.put(key, value); + return target; + } + target.computeIfPresent(key, (k, v) -> { + + if (v instanceof List list) { + List targetList = new ArrayList<>(list); + targetList.add(value); + return targetList; + } + return Arrays.asList(v, value); + }); + return target; + } + throw new IllegalStateException( + String.format("Cannot append value to %s type", ObjectUtils.nullSafeClassName(this.value))); + + } + + @SuppressWarnings({ "unchecked", "rawtypes" }) + protected Map remove(String key) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + Map clone = new LinkedHashMap<>((java.util.Map) this.value); + clone.remove(key); + return clone; + } + + /** + * Append the 
given key at the position in the underlying {@link LinkedHashMap}. + * + * @param index + * @param key + * @param value + * @return + * @since 3.1 + */ + @SuppressWarnings({ "unchecked" }) + protected Map appendAt(int index, String key, Object value) { + + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + Map clone = new LinkedHashMap<>(); + + int i = 0; + for (Map.Entry entry : ((Map) this.value).entrySet()) { + + if (i == index) { + clone.put(key, value); + } + if (!entry.getKey().equals(key)) { + clone.put(entry.getKey(), entry.getValue()); + } + i++; + } + if (i <= index) { + clone.put(key, value); + } + return clone; } + @SuppressWarnings({ "rawtypes" }) protected List values() { if (value instanceof List) { return new ArrayList((List) value); } + if (value instanceof java.util.Map) { return new ArrayList(((java.util.Map) value).values()); } + return new ArrayList<>(Collections.singletonList(value)); } @@ -156,11 +283,15 @@ protected T get(int index) { * @since 2.1 */ @SuppressWarnings("unchecked") - protected T get(Object key) { + protected @Nullable T get(Object key) { - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); + + return (T) ((Map) this.value).get(key); + } - return (T) ((java.util.Map) this.value).get(key); + protected boolean isArgumentMap() { + return this.value instanceof Map; } /** @@ -170,11 +301,11 @@ protected T get(Object key) { * @return */ @SuppressWarnings("unchecked") - protected java.util.Map argumentMap() { + protected Map argumentMap() { - Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map!"); + Assert.isInstanceOf(Map.class, this.value, "Value must be a type of Map"); - return Collections.unmodifiableMap((java.util.Map) value); + return Collections.unmodifiableMap((java.util.Map) value); } /** @@ -191,7 +322,7 @@ protected boolean contains(Object key) { return false; } - return 
((java.util.Map) this.value).containsKey(key); + return ((Map) this.value).containsKey(key); } protected abstract String getMongoMethod(); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java index 9511991d25..fa44656c99 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AccumulatorOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,16 +15,22 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; +import java.util.Map; import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; /** * Gateway to {@literal accumulator} aggregation operations. * * @author Christoph Strobl + * @author Julia Lee * @since 1.10 * @soundtrack Rage Against The Machine - Killing In The Name */ @@ -34,7 +40,7 @@ public class AccumulatorOperators { * Take the numeric value referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link AccumulatorOperatorFactory}. 
*/ public static AccumulatorOperatorFactory valueOf(String fieldReference) { return new AccumulatorOperatorFactory(fieldReference); @@ -44,7 +50,7 @@ public static AccumulatorOperatorFactory valueOf(String fieldReference) { * Take the numeric value referenced resulting from given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link AccumulatorOperatorFactory}. */ public static AccumulatorOperatorFactory valueOf(AggregationExpression expression) { return new AccumulatorOperatorFactory(expression); @@ -52,11 +58,12 @@ public static AccumulatorOperatorFactory valueOf(AggregationExpression expressio /** * @author Christoph Strobl + * @author Julia Lee */ public static class AccumulatorOperatorFactory { - private final String fieldReference; - private final AggregationExpression expression; + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; /** * Creates new {@link AccumulatorOperatorFactory} for given {@literal fieldReference}. @@ -65,7 +72,7 @@ public static class AccumulatorOperatorFactory { */ public AccumulatorOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -77,7 +84,7 @@ public AccumulatorOperatorFactory(String fieldReference) { */ public AccumulatorOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -86,8 +93,9 @@ public AccumulatorOperatorFactory(AggregationExpression expression) { * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates and * returns the sum. 
 * - * @return + * @return new instance of {@link Sum}. */ + @SuppressWarnings("NullAway") public Sum sum() { return usesFieldRef() ? Sum.sumOf(fieldReference) : Sum.sumOf(expression); } @@ -96,8 +104,9 @@ public Sum sum() { * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the * average value. * - * @return + * @return new instance of {@link Avg}. */ + @SuppressWarnings("NullAway") public Avg avg() { return usesFieldRef() ? Avg.avgOf(fieldReference) : Avg.avgOf(expression); } @@ -106,28 +115,53 @@ public Avg avg() { * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the * maximum value. * - * @return + * @return new instance of {@link Max}. */ + @SuppressWarnings("NullAway") public Max max() { return usesFieldRef() ? Max.maxOf(fieldReference) : Max.maxOf(expression); } + /** + * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the + * requested number of maximum values. + * + * @return new instance of {@link Max}. + * @since 4.0 + */ + public Max max(int numberOfResults) { + return max().limit(numberOfResults); + } + /** * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the * minimum value. * - * @return + * @return new instance of {@link Min}. */ + @SuppressWarnings("NullAway") public Min min() { return usesFieldRef() ? Min.minOf(fieldReference) : Min.minOf(expression); } + /** + * Creates new {@link AggregationExpression} that takes the associated numeric value expression and returns the + * requested number of minimum values. + * + * @return new instance of {@link Min}. + * @since 4.0 + */ + public Min min(int numberOfResults) { + return min().limit(numberOfResults); + } + /** + * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates the + * population standard deviation of the input values.
* - * @return + * @return new instance of {@link StdDevPop}. */ + @SuppressWarnings("NullAway") public StdDevPop stdDevPop() { return usesFieldRef() ? StdDevPop.stdDevPopOf(fieldReference) : StdDevPop.stdDevPopOf(expression); } @@ -136,17 +170,154 @@ public StdDevPop stdDevPop() { * Creates new {@link AggregationExpression} that takes the associated numeric value expression and calculates the * sample standard deviation of the input values. * - * @return + * @return new instance of {@link StdDevSamp}. */ + @SuppressWarnings("NullAway") public StdDevSamp stdDevSamp() { return usesFieldRef() ? StdDevSamp.stdDevSampOf(fieldReference) : StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + @SuppressWarnings("NullAway") + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. 
+ * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + @SuppressWarnings("NullAway") + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + + /** + * Creates new {@link ExpMovingAvgBuilder} to build {@link AggregationExpression expMovingAvg} that calculates + * the exponential moving average of numeric values. + * + * @return new instance of {@link ExpMovingAvg}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public ExpMovingAvgBuilder expMovingAvg() { + + ExpMovingAvg expMovingAvg = usesFieldRef() ? ExpMovingAvg.expMovingAvgOf(fieldReference) + : ExpMovingAvg.expMovingAvgOf(expression); + return new ExpMovingAvgBuilder() { + + @Override + public ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments) { + return expMovingAvg.n(numberOfHistoricalDocuments); + } + + @Override + public ExpMovingAvg alpha(double exponentialDecayValue) { + return expMovingAvg.alpha(exponentialDecayValue); + } + }; + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the associated numeric + * value expression. + * + * @return new instance of {@link Percentile}. + * @param percentages must not be {@literal null}.
+ * @since 4.2 + */ + @SuppressWarnings("NullAway") + public Percentile percentile(Double... percentages) { + Percentile percentile = usesFieldRef() ? Percentile.percentileOf(fieldReference) + : Percentile.percentileOf(expression); + return percentile.percentages(percentages); + } + + /** + * Creates new {@link AggregationExpression} that calculates the median of the associated numeric value expression. + * + * @return new instance of {@link Median}. + * @since 4.2 + */ + @SuppressWarnings("NullAway") + public Median median() { + return usesFieldRef() ? Median.medianOf(fieldReference) : Median.medianOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } } + /** + * Builder for {@link ExpMovingAvg}. + * + * @since 3.3 + */ + public interface ExpMovingAvgBuilder { + + /** + * Define the number of historical documents with significant mathematical weight. + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg historicalDocuments(int numberOfHistoricalDocuments); + + /** + * Define the exponential decay value. + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + ExpMovingAvg alpha(double exponentialDecayValue); + + } + /** * {@link AggregationExpression} for {@code $sum}. * @@ -167,11 +338,11 @@ protected String getMongoMethod() { * Creates new {@link Sum}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. */ public static Sum sumOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sum(asFields(fieldReference)); } @@ -179,11 +350,12 @@ public static Sum sumOf(String fieldReference) { * Creates new {@link Sum}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. 
*/ + @Contract("_ -> new") public static Sum sumOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sum(Collections.singletonList(expression)); } @@ -192,11 +364,12 @@ public static Sum sumOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. */ + @Contract("_ -> new") public Sum and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sum(append(Fields.field(fieldReference))); } @@ -205,25 +378,35 @@ public Sum and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Sum}. */ + @Contract("_ -> new") public Sum and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sum(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Sum} with all previously added arguments appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param value the value to add. + * @return new instance of {@link Sum}. + * @since 2.2 */ + @Contract("_ -> new") + public Sum and(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Sum(append(value)); + } + @Override - @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -250,11 +433,11 @@ protected String getMongoMethod() { * Creates new {@link Avg}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. */ public static Avg avgOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Avg(asFields(fieldReference)); } @@ -262,11 +445,11 @@ public static Avg avgOf(String fieldReference) { * Creates new {@link Avg}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. */ public static Avg avgOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Avg(Collections.singletonList(expression)); } @@ -275,11 +458,12 @@ public static Avg avgOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. 
*/ + @Contract("_ -> new") public Avg and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Avg(append(Fields.field(fieldReference))); } @@ -288,25 +472,20 @@ public Avg and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Avg}. */ + @Contract("_ -> new") public Avg and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Avg(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -326,31 +505,31 @@ private Max(Object value) { @Override protected String getMongoMethod() { - return "$max"; + return contains("n") ? "$maxN" : "$max"; } /** * Creates new {@link Max}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Max}. 
*/ public static Max maxOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Max(asFields(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(Collections.singletonMap("input", Fields.field(fieldReference))); } /** * Creates new {@link Max}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Max}. */ public static Max maxOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Max(Collections.singletonList(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Max(Collections.singletonMap("input", expression)); } /** @@ -358,12 +537,13 @@ public static Max maxOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Max}. */ + @Contract("_ -> new") public Max and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Max(append(Fields.field(fieldReference))); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Max(appendTo("input", Fields.field(fieldReference))); } /** @@ -371,25 +551,42 @@ public Max and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Max}. 
*/ + @Contract("_ -> new") public Max and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Max(append(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Max(appendTo("input", expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Max} that returns the given number of maximum values ({@literal $maxN}). + * NOTE: Cannot be used with more than one {@literal input} value. + * + * @param numberOfResults + * @return new instance of {@link Max}. */ + @Contract("_ -> new") + public Max limit(int numberOfResults) { + return new Max(append("n", numberOfResults)); + } + + @Override + @SuppressWarnings("NullAway") + public Document toDocument(AggregationOperationContext context) { + if (get("n") == null) { + return toDocument(get("input"), context); + } + return super.toDocument(context); + } + @Override @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -409,31 +606,31 @@ private Min(Object value) { @Override protected String getMongoMethod() { - return "$min"; + return contains("n") ? "$minN" : "$min"; } /** * Creates new {@link Min}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Min}. 
*/ public static Min minOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Min(asFields(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(Collections.singletonMap("input", Fields.field(fieldReference))); } /** * Creates new {@link Min}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Min}. */ public static Min minOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Min(Collections.singletonList(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Min(Collections.singletonMap("input", expression)); } /** @@ -441,12 +638,13 @@ public static Min minOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Min}. */ + @Contract("_ -> new") public Min and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new Min(append(Fields.field(fieldReference))); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Min(appendTo("input", Fields.field(fieldReference))); } /** @@ -454,25 +652,42 @@ public Min and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Min}. 
*/ + @Contract("_ -> new") public Min and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new Min(append(expression)); + Assert.notNull(expression, "Expression must not be null"); + return new Min(appendTo("input", expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) + /** + * Creates new {@link Min} that returns the given number of minimum values ({@literal $minN}). + * NOTE: Cannot be used with more than one {@literal input} value. + * + * @param numberOfResults + * @return new instance of {@link Min}. */ + @Contract("_ -> new") + public Min limit(int numberOfResults) { + return new Min(append("n", numberOfResults)); + } + + @Override + @SuppressWarnings("NullAway") + public Document toDocument(AggregationOperationContext context) { + + if (get("n") == null) { + return toDocument(get("input"), context); + } + return super.toDocument(context); + } + @Override - @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -499,11 +714,11 @@ protected String getMongoMethod() { * Creates new {@link StdDevPop}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. 
*/ public static StdDevPop stdDevPopOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevPop(asFields(fieldReference)); } @@ -511,11 +726,11 @@ public static StdDevPop stdDevPopOf(String fieldReference) { * Creates new {@link StdDevPop} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. */ public static StdDevPop stdDevPopOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevPop(Collections.singletonList(expression)); } @@ -524,11 +739,12 @@ public static StdDevPop stdDevPopOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. */ + @Contract("_ -> new") public StdDevPop and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevPop(append(Fields.field(fieldReference))); } @@ -537,25 +753,20 @@ public StdDevPop and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StdDevPop}. 
*/ + @Contract("_ -> new") public StdDevPop and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevPop(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); @@ -582,11 +793,11 @@ protected String getMongoMethod() { * Creates new {@link StdDevSamp}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevSamp}. */ public static StdDevSamp stdDevSampOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevSamp(asFields(fieldReference)); } @@ -594,11 +805,11 @@ public static StdDevSamp stdDevSampOf(String fieldReference) { * Creates new {@link StdDevSamp}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StdDevSamp}. 
*/ public static StdDevSamp stdDevSampOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevSamp(Collections.singletonList(expression)); } @@ -607,11 +818,12 @@ public static StdDevSamp stdDevSampOf(AggregationExpression expression) { * NOTE: Only possible in {@code $project} stage. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link StdDevSamp}. */ + @Contract("_ -> new") public StdDevSamp and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new StdDevSamp(append(Fields.field(fieldReference))); } @@ -620,28 +832,375 @@ public StdDevSamp and(String fieldReference) { * NOTE: Only possible in {@code $project} stage. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link StdDevSamp}. 
*/ + @Contract("_ -> new") public StdDevSamp and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new StdDevSamp(append(expression)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.AbstractAggregationExpression#toDocument(java.lang.Object, org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override - @SuppressWarnings("unchecked") public Document toDocument(Object value, AggregationOperationContext context) { - if (value instanceof List) { - if (((List) value).size() == 1) { - return super.toDocument(((List) value).iterator().next(), context); - } + if (value instanceof List list && list.size() == 1) { + return super.toDocument(list.iterator().next(), context); } return super.toDocument(value, context); } } + + /** + * {@link AggregationExpression} for {@code $covariancePop}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovariancePop extends AbstractAggregationExpression { + + private CovariancePop(Object value) { + super(value); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovariancePop(asFields(fieldReference)); + } + + /** + * Creates new {@link CovariancePop}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + public static CovariancePop covariancePopOf(AggregationExpression expression) { + return new CovariancePop(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. 
+ * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + @Contract("_ -> new") + public CovariancePop and(String fieldReference) { + return new CovariancePop(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovariancePop} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + */ + @Contract("_ -> new") + public CovariancePop and(AggregationExpression expression) { + return new CovariancePop(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covariancePop"; + } + } + + /** + * {@link AggregationExpression} for {@code $covarianceSamp}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class CovarianceSamp extends AbstractAggregationExpression { + + private CovarianceSamp(Object value) { + super(value); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new CovarianceSamp(asFields(fieldReference)); + } + + /** + * Creates new {@link CovarianceSamp}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + public static CovarianceSamp covarianceSampOf(AggregationExpression expression) { + return new CovarianceSamp(Collections.singletonList(expression)); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. 
+ */ + @Contract("_ -> new") + public CovarianceSamp and(String fieldReference) { + return new CovarianceSamp(append(asFields(fieldReference))); + } + + /** + * Creates new {@link CovarianceSamp} with all previously added arguments appending the given one. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + */ + @Contract("_ -> new") + public CovarianceSamp and(AggregationExpression expression) { + return new CovarianceSamp(append(expression)); + } + + @Override + protected String getMongoMethod() { + return "$covarianceSamp"; + } + } + + /** + * {@link ExpMovingAvg} calculates the exponential moving average of numeric values. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class ExpMovingAvg extends AbstractAggregationExpression { + + private ExpMovingAvg(Object value) { + super(value); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the field holding the value to be used as input. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(String fieldReference) { + return new ExpMovingAvg(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new {@link ExpMovingAvg} by defining the {@link AggregationExpression expression} to compute the value + * to be used as input. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ExpMovingAvg}. + */ + public static ExpMovingAvg expMovingAvgOf(AggregationExpression expression) { + return new ExpMovingAvg(Collections.singletonMap("input", expression)); + } + + /** + * Define the number of historical documents with significant mathematical weight.
          + * Specify either {@link #n(int) N} or {@link #alpha(double) alpha}. Not both! + * + * @param numberOfHistoricalDocuments + * @return new instance of {@link ExpMovingAvg}. + */ + @Contract("_ -> new") + public ExpMovingAvg n/*umber of historical documents*/(int numberOfHistoricalDocuments) { + return new ExpMovingAvg(append("N", numberOfHistoricalDocuments)); + } + + /** + * Define the exponential decay value.
          + * Specify either {@link #alpha(double) alpha} or {@link #n(int) N}. Not both! + * + * @param exponentialDecayValue + * @return new instance of {@link ExpMovingAvg}. + */ + @Contract("_ -> new") + public ExpMovingAvg alpha(double exponentialDecayValue) { + return new ExpMovingAvg(append("alpha", exponentialDecayValue)); + } + + @Override + protected String getMongoMethod() { + return "$expMovingAvg"; + } + } + + /** + * {@link AggregationExpression} for {@code $percentile}. + * + * @author Julia Lee + * @since 4.2 + */ + public static class Percentile extends AbstractAggregationExpression { + + private Percentile(Object value) { + super(value); + } + + /** + * Creates new {@link Percentile}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public static Percentile percentileOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + Map fields = new HashMap<>(); + fields.put("input", Fields.field(fieldReference)); + fields.put("method", "approximate"); + return new Percentile(fields); + } + + /** + * Creates new {@link Percentile}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + public static Percentile percentileOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + Map fields = new HashMap<>(); + fields.put("input", expression); + fields.put("method", "approximate"); + return new Percentile(fields); + } + + /** + * Define the percentile value(s) that must resolve to percentages in the range {@code 0.0 - 1.0} inclusive. + * + * @param percentages must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + @Contract("_ -> new") + public Percentile percentages(Double... 
percentages) { + + Assert.notEmpty(percentages, "Percentages must not be null or empty"); + return new Percentile(append("p", Arrays.asList(percentages))); + } + + /** + * Creates new {@link Percentile} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + @Contract("_ -> new") + public Percentile and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Percentile(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Percentile} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Percentile}. + */ + @Contract("_ -> new") + public Percentile and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Percentile(appendTo("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$percentile"; + } + } + + /** + * {@link AggregationExpression} for {@code $median}. + * + * @author Julia Lee + * @since 4.2 + */ + public static class Median extends AbstractAggregationExpression { + + private Median(Object value) { + super(value); + } + + /** + * Creates new {@link Median}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public static Median medianOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + Map fields = new HashMap<>(); + fields.put("input", Fields.field(fieldReference)); + fields.put("method", "approximate"); + return new Median(fields); + } + + /** + * Creates new {@link Median}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Median}. + */ + public static Median medianOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + Map fields = new HashMap<>(); + fields.put("input", expression); + fields.put("method", "approximate"); + return new Median(fields); + } + + /** + * Creates new {@link Median} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Median}. + */ + @Contract("_ -> new") + public Median and(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Median(appendTo("input", Fields.field(fieldReference))); + } + + /** + * Creates new {@link Median} with all previously added inputs appending the given one.
          + * NOTE: Only possible in {@code $project} stage. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Median}. + */ + @Contract("_ -> new") + public Median and(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Median(appendTo("input", expression)); + } + + @Override + protected String getMongoMethod() { + return "$median"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java new file mode 100644 index 0000000000..3cb75d3050 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AddFieldsOperation.java @@ -0,0 +1,205 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder.ValueAppender; +import org.springframework.lang.Contract; + +/** + * Adds new fields to documents. 
{@code $addFields} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + *
          + * AddFieldsOperation.addField("totalHomework").withValue("A+").and().addField("totalQuiz").withValue("B-")
          + * 
          + * + * @author Christoph Strobl + * @author Kim Sumin + * @since 3.0 + * @see MongoDB Aggregation + * Framework: $addFields + */ +public class AddFieldsOperation extends DocumentEnhancingOperation { + + /** + * Create new instance of {@link AddFieldsOperation} adding map keys as exposed fields. + * + * @param source must not be {@literal null}. + */ + private AddFieldsOperation(Map source) { + super(source); + } + + /** + * Create new instance of {@link AddFieldsOperation} + * + * @param field must not be {@literal null}. + * @param value can be {@literal null}. + */ + public AddFieldsOperation(Object field, @Nullable Object value) { + this(Collections.singletonMap(field, value)); + } + + /** + * Define the {@link AddFieldsOperation} via {@link AddFieldsOperationBuilder}. + * + * @return new instance of {@link AddFieldsOperationBuilder}. + */ + public static AddFieldsOperationBuilder builder() { + return new AddFieldsOperationBuilder(); + } + + /** + * Concatenate another field to add. + * + * @param field must not be {@literal null}. + * @return new instance of {@link AddFieldsOperationBuilder}. + */ + public static ValueAppender addField(String field) { + return new AddFieldsOperationBuilder().addField(field); + } + + /** + * Append the value for a specific field to the operation. + * + * @param field the target field to add. + * @param value the value to assign. + * @return new instance of {@link AddFieldsOperation}. + */ + @Contract("_ -> new") + public AddFieldsOperation addField(Object field, Object value) { + + LinkedHashMap target = new LinkedHashMap<>(getValueMap()); + target.put(field, value); + + return new AddFieldsOperation(target); + } + + /** + * Concatenate additional fields to add. + * + * @return new instance of {@link AddFieldsOperationBuilder}. 
+ */ + @Contract("-> new") + public AddFieldsOperationBuilder and() { + return new AddFieldsOperationBuilder(getValueMap()); + } + + @Override + protected String mongoOperator() { + return "$addFields"; + } + + /** + * @author Christoph Strobl + * @since 3.0 + */ + public static class AddFieldsOperationBuilder { + + private final Map valueMap; + + private AddFieldsOperationBuilder() { + this.valueMap = new LinkedHashMap<>(); + } + + private AddFieldsOperationBuilder(Map source) { + this.valueMap = new LinkedHashMap<>(source); + } + + public AddFieldsOperationBuilder addFieldWithValue(String field, @Nullable Object value) { + return addField(field).withValue(value); + } + + public AddFieldsOperationBuilder addFieldWithValueOf(String field, Object value) { + return addField(field).withValueOf(value); + } + + /** + * Define the field to add. + * + * @param field must not be {@literal null}. + * @return new instance of {@link ValueAppender}. + */ + public ValueAppender addField(String field) { + + return new ValueAppender() { + + @Override + public AddFieldsOperationBuilder withValue(@Nullable Object value) { + + valueMap.put(field, value); + return AddFieldsOperationBuilder.this; + } + + @Override + public AddFieldsOperationBuilder withValueOf(Object value) { + + valueMap.put(field, value instanceof String stringValue ? Fields.field(stringValue) : value); + return AddFieldsOperationBuilder.this; + } + + @Override + public AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values) { + + valueMap.put(field, new ExpressionProjection(operation, values)); + return AddFieldsOperationBuilder.this; + } + }; + } + + public AddFieldsOperation build() { + return new AddFieldsOperation(valueMap); + } + + /** + * @author Christoph Strobl + * @since 3.0 + */ + public interface ValueAppender { + + /** + * Define the value to assign as is. + * + * @param value can be {@literal null}. + * @return new instance of {@link AddFieldsOperation}. 
+ */ + AddFieldsOperationBuilder withValue(@Nullable Object value); + + /** + * Define the value to assign. Plain {@link String} values are treated as {@link Field field references}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AddFieldsOperation}. + */ + AddFieldsOperationBuilder withValueOf(Object value); + + /** + * Adds a generic projection for the current field. + * + * @param operation the operation key, e.g. {@code $add}. + * @param values the values to be set for the projection operation. + * @return new instance of {@link AddFieldsOperation}. + */ + AddFieldsOperationBuilder withValueOfExpression(String operation, Object... values); + } + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java index 9fd7db3b07..45de38ed21 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/Aggregation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,18 +21,22 @@ import java.util.List; import org.bson.Document; +import org.bson.conversions.Bson; import org.springframework.data.domain.Sort; import org.springframework.data.domain.Sort.Direction; +import org.springframework.data.mongodb.core.aggregation.AddFieldsOperation.AddFieldsOperationBuilder; import org.springframework.data.mongodb.core.aggregation.CountOperation.CountOperationBuilder; import org.springframework.data.mongodb.core.aggregation.FacetOperation.FacetOperationBuilder; import org.springframework.data.mongodb.core.aggregation.GraphLookupOperation.StartWithBuilder; +import org.springframework.data.mongodb.core.aggregation.LookupOperation.LookupOperationBuilder; +import org.springframework.data.mongodb.core.aggregation.MergeOperation.MergeOperationBuilder; import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootDocumentOperationBuilder; import org.springframework.data.mongodb.core.aggregation.ReplaceRootOperation.ReplaceRootOperationBuilder; +import org.springframework.data.mongodb.core.mapping.FieldName; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.CriteriaDefinition; import org.springframework.data.mongodb.core.query.NearQuery; import org.springframework.data.mongodb.core.query.SerializationUtils; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** @@ -48,6 +52,7 @@ * @author Nikolay Bogdanov * @author Gustavo de Geus * @author Jérôme Guyon + * @author Sangyong Choi * @since 1.3 */ public class Aggregation { @@ -94,7 +99,7 @@ public class Aggregation { public static final AggregationOperationContext DEFAULT_CONTEXT = 
AggregationOperationRenderer.DEFAULT_CONTEXT; public static final AggregationOptions DEFAULT_OPTIONS = newAggregationOptions().build(); - protected final List operations; + protected final AggregationPipeline pipeline; private final AggregationOptions options; /** @@ -115,18 +120,29 @@ public static Aggregation newAggregation(AggregationOperation... operations) { return new Aggregation(operations); } + /** + * Creates a new {@link AggregationUpdate} from the given {@link AggregationOperation}s. + * + * @param operations can be {@literal empty} but must not be {@literal null}. + * @return new instance of {@link AggregationUpdate}. + * @since 3.0 + */ + public static AggregationUpdate newUpdate(AggregationOperation... operations) { + return AggregationUpdate.from(Arrays.asList(operations)); + } + /** * Returns a copy of this {@link Aggregation} with the given {@link AggregationOptions} set. Note that options are * supported in MongoDB version 2.6+. * * @param options must not be {@literal null}. - * @return + * @return new instance of {@link Aggregation}. * @since 1.6 */ public Aggregation withOptions(AggregationOptions options) { - Assert.notNull(options, "AggregationOptions must not be null."); - return new Aggregation(this.operations, options); + Assert.notNull(options, "AggregationOptions must not be null"); + return new Aggregation(this.pipeline.getOperations(), options); } /** @@ -164,7 +180,7 @@ protected Aggregation(AggregationOperation... aggregationOperations) { */ protected static List asAggregationList(AggregationOperation... aggregationOperations) { - Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty!"); + Assert.notEmpty(aggregationOperations, "AggregationOperations must not be null or empty"); return Arrays.asList(aggregationOperations); } @@ -181,30 +197,18 @@ protected Aggregation(List aggregationOperations) { /** * Creates a new {@link Aggregation} from the given {@link AggregationOperation}s. 
* - * @param aggregationOperations must not be {@literal null} or empty. + * @param aggregationOperations must not be {@literal null}. * @param options must not be {@literal null} or empty. */ protected Aggregation(List aggregationOperations, AggregationOptions options) { - Assert.notNull(aggregationOperations, "AggregationOperations must not be null!"); - Assert.isTrue(!aggregationOperations.isEmpty(), "At least one AggregationOperation has to be provided"); - Assert.notNull(options, "AggregationOptions must not be null!"); - - // check $out is the last operation if it exists - for (AggregationOperation aggregationOperation : aggregationOperations) { - if (aggregationOperation instanceof OutOperation && !isLast(aggregationOperation, aggregationOperations)) { - throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline."); - } - } + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); + Assert.notNull(options, "AggregationOptions must not be null"); - this.operations = aggregationOperations; + this.pipeline = new AggregationPipeline(aggregationOperations); this.options = options; } - private boolean isLast(AggregationOperation aggregationOperation, List aggregationOperations) { - return aggregationOperations.indexOf(aggregationOperation) == aggregationOperations.size() - 1; - } - /** * Get the {@link AggregationOptions}. * @@ -221,14 +225,61 @@ public AggregationOptions getOptions() { * @return */ public static String previousOperation() { - return "_id"; + return FieldName.ID.name(); + } + + /** + * Obtain an {@link AddFieldsOperationBuilder builder} instance to create a new {@link AddFieldsOperation}.
          + * Starting in version 4.2, MongoDB adds a new aggregation pipeline stage {@link AggregationUpdate#set $set} that is + * an alias for {@code $addFields}. + * + * @return new instance of {@link AddFieldsOperationBuilder}. + * @see AddFieldsOperation + * @since 3.0 + */ + public static AddFieldsOperationBuilder addFields() { + return AddFieldsOperation.builder(); + } + + /** + * Creates a new {@link AggregationOperation} taking the given {@link Bson bson value} as is.
          + * + *
          +	 * Aggregation.stage(Aggregates.search(exists(fieldPath("..."))));
          +	 * 
          + * + * Field mapping against a potential domain type or previous aggregation stages will not happen. + * + * @param aggregationOperation must not be {@literal null}. + * @return new instance of {@link AggregationOperation}. + * @since 4.0 + */ + public static AggregationOperation stage(Bson aggregationOperation) { + return new BasicAggregationOperation(aggregationOperation); + } + + /** + * Creates a new {@link AggregationOperation} taking the given {@link String json value} as is.
          + * + *
          +	 * Aggregation.stage("{ $search : { near : { path : 'released' , origin : ... } } }");
          +	 * 
          + * + * Field mapping against a potential domain type or previous aggregation stages will not happen. + * + * @param json the JSON representation of the pipeline stage. Must not be {@literal null}. + * @return new instance of {@link AggregationOperation}. + * @since 4.0 + */ + public static AggregationOperation stage(String json) { + return new BasicAggregationOperation(json); } /** * Creates a new {@link ProjectionOperation} including the given fields. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link ProjectionOperation}. */ public static ProjectionOperation project(String... fields) { return project(fields(fields)); @@ -238,17 +289,30 @@ public static ProjectionOperation project(String... fields) { * Creates a new {@link ProjectionOperation} including the given {@link Fields}. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link ProjectionOperation}. */ public static ProjectionOperation project(Fields fields) { return new ProjectionOperation(fields); } + /** + * Creates a new {@link ProjectionOperation} including all top level fields of the given given {@link Class}. + * + * @param type must not be {@literal null}. + * @return new instance of {@link ProjectionOperation}. + * @since 2.2 + */ + public static ProjectionOperation project(Class type) { + + Assert.notNull(type, "Type must not be null"); + return new ProjectionOperation(type); + } + /** * Factory method to create a new {@link UnwindOperation} for the field with the given name. * * @param field must not be {@literal null} or empty. - * @return + * @return new instance of {@link UnwindOperation}. */ public static UnwindOperation unwind(String field) { return new UnwindOperation(field(field)); @@ -258,7 +322,7 @@ public static UnwindOperation unwind(String field) { * Factory method to create a new {@link ReplaceRootOperation} for the field with the given name. 
* * @param fieldName must not be {@literal null} or empty. - * @return + * @return new instance of {@link ReplaceRootOperation}. * @since 1.10 */ public static ReplaceRootOperation replaceRoot(String fieldName) { @@ -270,7 +334,7 @@ public static ReplaceRootOperation replaceRoot(String fieldName) { * {@link AggregationExpression}. * * @param aggregationExpression must not be {@literal null}. - * @return + * @return new instance of {@link ReplaceRootOperation}. * @since 1.10 */ public static ReplaceRootOperation replaceRoot(AggregationExpression aggregationExpression) { @@ -317,9 +381,9 @@ public static UnwindOperation unwind(String field, String arrayIndex) { } /** - * Factory method to create a new {@link UnwindOperation} for the field with the given nameincluding the name of a new - * field to hold the array index of the element as {@code arrayIndex} using {@code preserveNullAndEmptyArrays}. Note - * that extended unwind is supported in MongoDB version 3.2+. + * Factory method to create a new {@link UnwindOperation} for the field with the given name, including the name of a + * new field to hold the array index of the element as {@code arrayIndex} using {@code preserveNullAndEmptyArrays}. + * Note that extended unwind is supported in MongoDB version 3.2+. * * @param field must not be {@literal null} or empty. * @param arrayIndex must not be {@literal null} or empty. @@ -336,7 +400,7 @@ public static UnwindOperation unwind(String field, String arrayIndex, boolean pr * Creates a new {@link GroupOperation} for the given fields. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link GroupOperation}. */ public static GroupOperation group(String... fields) { return group(fields(fields)); @@ -357,18 +421,32 @@ public static GroupOperation group(Fields fields) { * {@link GraphLookupOperation} given {@literal fromCollection}. * * @param fromCollection must not be {@literal null} or empty. 
- * @return + * @return new instance of {@link StartWithBuilder} for creating a {@link GraphLookupOperation}. * @since 1.10 */ public static StartWithBuilder graphLookup(String fromCollection) { return GraphLookupOperation.builder().from(fromCollection); } + /** + * Creates a new {@link VectorSearchOperation} by starting from the {@code indexName} to use. + * + * @param indexName must not be {@literal null} or empty. + * @return new instance of {@link VectorSearchOperation.PathContributor}. + * @since 4.5 + */ + public static VectorSearchOperation.PathContributor vectorSearch(String indexName) { + + Assert.hasText(indexName, "Index name must not be null or empty"); + + return VectorSearchOperation.search(indexName); + } + /** * Factory method to create a new {@link SortOperation} for the given {@link Sort}. * * @param sort must not be {@literal null}. - * @return + * @return new instance of {@link SortOperation}. */ public static SortOperation sort(Sort sort) { return new SortOperation(sort); @@ -379,7 +457,7 @@ public static SortOperation sort(Sort sort) { * * @param direction must not be {@literal null}. * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link SortOperation}. */ public static SortOperation sort(Direction direction, String... fields) { return new SortOperation(Sort.by(direction, fields)); @@ -389,7 +467,7 @@ public static SortOperation sort(Direction direction, String... fields) { * Creates a new {@link SortByCountOperation} given {@literal groupByField}. * * @param field must not be {@literal null} or empty. - * @return + * @return new instance of {@link SortByCountOperation}. * @since 2.1 */ public static SortByCountOperation sortByCount(String field) { @@ -400,7 +478,7 @@ public static SortByCountOperation sortByCount(String field) { * Creates a new {@link SortByCountOperation} given {@link AggregationExpression group and sort expression}. * * @param groupAndSortExpression must not be {@literal null}. 
- * @return + * @return new instance of {@link SortByCountOperation}. * @since 2.1 */ public static SortByCountOperation sortByCount(AggregationExpression groupAndSortExpression) { @@ -411,18 +489,7 @@ public static SortByCountOperation sortByCount(AggregationExpression groupAndSor * Creates a new {@link SkipOperation} skipping the given number of elements. * * @param elementsToSkip must not be less than zero. - * @return - * @deprecated prepare to get this one removed in favor of {@link #skip(long)}. - */ - public static SkipOperation skip(int elementsToSkip) { - return new SkipOperation(elementsToSkip); - } - - /** - * Creates a new {@link SkipOperation} skipping the given number of elements. - * - * @param elementsToSkip must not be less than zero. - * @return + * @return new instance of {@link SkipOperation}. */ public static SkipOperation skip(long elementsToSkip) { return new SkipOperation(elementsToSkip); @@ -432,7 +499,7 @@ public static SkipOperation skip(long elementsToSkip) { * Creates a new {@link LimitOperation} limiting the result to the given number of elements. * * @param maxElements must not be less than zero. - * @return + * @return new instance of {@link LimitOperation}. */ public static LimitOperation limit(long maxElements) { return new LimitOperation(maxElements); @@ -442,7 +509,7 @@ public static LimitOperation limit(long maxElements) { * Creates a new {@link SampleOperation} to select the specified number of documents from its input randomly. * * @param sampleSize must not be less than zero. - * @return + * @return new instance of {@link SampleOperation}. * @since 2.0 */ public static SampleOperation sample(long sampleSize) { @@ -453,7 +520,7 @@ public static SampleOperation sample(long sampleSize) { * Creates a new {@link MatchOperation} using the given {@link Criteria}. * * @param criteria must not be {@literal null}. - * @return + * @return new instance of {@link MatchOperation}. 
*/ public static MatchOperation match(Criteria criteria) { return new MatchOperation(criteria); @@ -463,13 +530,48 @@ public static MatchOperation match(Criteria criteria) { * Creates a new {@link MatchOperation} using the given {@link CriteriaDefinition}. * * @param criteria must not be {@literal null}. - * @return + * @return new instance of {@link MatchOperation}. * @since 1.10 */ public static MatchOperation match(CriteriaDefinition criteria) { return new MatchOperation(criteria); } + /** + * Creates a new {@link MatchOperation} using the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link MatchOperation}. + * @since 3.3 + */ + public static MatchOperation match(AggregationExpression expression) { + return new MatchOperation(expression); + } + + /** + * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the {@code distanceField}. The + * {@code distanceField} defines output field that contains the calculated distance. + * + * @param query must not be {@literal null}. + * @param distanceField must not be {@literal null} or empty. + * @return new instance of {@link GeoNearOperation}. + * @since 1.7 + */ + public static GeoNearOperation geoNear(NearQuery query, String distanceField) { + return new GeoNearOperation(query, distanceField); + } + + /** + * Obtain a {@link MergeOperationBuilder builder} instance to create a new {@link MergeOperation}. + * + * @return new instance of {@link MergeOperationBuilder}. + * @see MergeOperation + * @since 3.0 + */ + public static MergeOperationBuilder merge() { + return MergeOperation.builder(); + } + /** * Creates a new {@link OutOperation} using the given collection name. This operation must be the last operation in * the pipeline. @@ -478,7 +580,7 @@ public static MatchOperation match(CriteriaDefinition criteria) { * collection in the current database if one does not already exist. 
The collection is not visible until the * aggregation completes. If the aggregation fails, MongoDB does not create the collection. Must not be * {@literal null}. - * @return + * @return new instance of {@link OutOperation}. */ public static OutOperation out(String outCollectionName) { return new OutOperation(outCollectionName); @@ -488,7 +590,7 @@ public static OutOperation out(String outCollectionName) { * Creates a new {@link BucketOperation} given {@literal groupByField}. * * @param groupByField must not be {@literal null} or empty. - * @return + * @return new instance of {@link BucketOperation}. * @since 1.10 */ public static BucketOperation bucket(String groupByField) { @@ -499,7 +601,7 @@ public static BucketOperation bucket(String groupByField) { * Creates a new {@link BucketOperation} given {@link AggregationExpression group-by expression}. * * @param groupByExpression must not be {@literal null}. - * @return + * @return new instance of {@link BucketOperation}. * @since 1.10 */ public static BucketOperation bucket(AggregationExpression groupByExpression) { @@ -511,7 +613,7 @@ public static BucketOperation bucket(AggregationExpression groupByExpression) { * * @param groupByField must not be {@literal null} or empty. * @param buckets number of buckets, must be a positive integer. - * @return + * @return new instance of {@link BucketAutoOperation}. * @since 1.10 */ public static BucketAutoOperation bucketAuto(String groupByField, int buckets) { @@ -523,7 +625,7 @@ public static BucketAutoOperation bucketAuto(String groupByField, int buckets) { * * @param groupByExpression must not be {@literal null}. * @param buckets number of buckets, must be a positive integer. - * @return + * @return new instance of {@link BucketAutoOperation}. 
* @since 1.10 */ public static BucketAutoOperation bucketAuto(AggregationExpression groupByExpression, int buckets) { @@ -533,7 +635,7 @@ public static BucketAutoOperation bucketAuto(AggregationExpression groupByExpres /** * Creates a new {@link FacetOperation}. * - * @return + * @return new instance of {@link FacetOperation}. * @since 1.10 */ public static FacetOperation facet() { @@ -544,7 +646,7 @@ public static FacetOperation facet() { * Creates a new {@link FacetOperationBuilder} given {@link Aggregation}. * * @param aggregationOperations the sub-pipeline, must not be {@literal null}. - * @return + * @return new instance of {@link FacetOperation}. * @since 1.10 */ public static FacetOperationBuilder facet(AggregationOperation... aggregationOperations) { @@ -579,6 +681,23 @@ public static LookupOperation lookup(Field from, Field localField, Field foreign return new LookupOperation(from, localField, foreignField, as); } + /** + * Entrypoint for creating {@link LookupOperation $lookup} using a fluent builder API. + * + *
          +	 * Aggregation.lookup().from("restaurants").localField("restaurant_name").foreignField("name")
          +	 * 		.let(newVariable("orders_drink").forField("drink"))
          +	 * 		.pipeline(match(ctx -> new Document("$expr", new Document("$in", List.of("$$orders_drink", "$beverages")))))
          +	 * 		.as("matches")
          +	 * 
          + * + * @return new instance of {@link LookupOperationBuilder}. + * @since 4.1 + */ + public static LookupOperationBuilder lookup() { + return new LookupOperationBuilder(); + } + /** * Creates a new {@link CountOperationBuilder}. * @@ -589,11 +708,31 @@ public static CountOperationBuilder count() { return new CountOperationBuilder(); } + /** + * Creates a new {@link RedactOperation} that can restrict the content of a document based on information stored + * within the document itself. + * + *
          +	 *
          +	 * Aggregation.redact(ConditionalOperators.when(Criteria.where("level").is(5)) //
          +	 * 		.then(RedactOperation.PRUNE) //
          +	 * 		.otherwise(RedactOperation.DESCEND));
          +	 * 
          + * + * @param condition Any {@link AggregationExpression} that resolves to {@literal $$DESCEND}, {@literal $$PRUNE}, or + * {@literal $$KEEP}. Must not be {@literal null}. + * @return new instance of {@link RedactOperation}. Never {@literal null}. + * @since 3.0 + */ + public static RedactOperation redact(AggregationExpression condition) { + return new RedactOperation(condition); + } + /** * Creates a new {@link Fields} instance for the given field names. * * @param fields must not be {@literal null}. - * @return + * @return new instance of {@link Fields}. * @see Fields#fields(String...) */ public static Fields fields(String... fields) { @@ -605,29 +744,16 @@ public static Fields fields(String... fields) { * * @param name must not be {@literal null} or empty. * @param target must not be {@literal null} or empty. - * @return + * @return new instance of {@link Fields}. */ public static Fields bind(String name, String target) { return Fields.from(field(name, target)); } - /** - * Creates a new {@link GeoNearOperation} instance from the given {@link NearQuery} and the{@code distanceField}. The - * {@code distanceField} defines output field that contains the calculated distance. - * - * @param query must not be {@literal null}. - * @param distanceField must not be {@literal null} or empty. - * @return - * @since 1.7 - */ - public static GeoNearOperation geoNear(NearQuery query, String distanceField) { - return new GeoNearOperation(query, distanceField); - } - /** * Returns a new {@link AggregationOptions.Builder}. * - * @return + * @return new instance of {@link AggregationOptions.Builder}. 
* @since 1.6 */ public static AggregationOptions.Builder newAggregationOptions() { @@ -642,12 +768,19 @@ public static AggregationOptions.Builder newAggregationOptions() { * @since 2.1 */ public List toPipeline(AggregationOperationContext rootContext) { - return AggregationOperationRenderer.toDocument(operations, rootContext); + return pipeline.toDocuments(rootContext); } /** - * Converts this {@link Aggregation} specification to a {@link Document}. - *

          + * @return the {@link AggregationPipeline}. + * @since 3.0.2 + */ + public AggregationPipeline getPipeline() { + return pipeline; + } + + /** + * Converts this {@link Aggregation} specification to a {@link Document}.
          * MongoDB requires as of 3.6 cursor-based aggregation. Use {@link #toPipeline(AggregationOperationContext)} to render * an aggregation pipeline. * @@ -662,60 +795,8 @@ public Document toDocument(String inputCollectionName, AggregationOperationConte return options.applyAndReturnPotentiallyChangedCommand(command); } - /* - * (non-Javadoc) - * @see java.lang.Object#toString() - */ @Override public String toString() { return SerializationUtils.serializeToJsonSafely(toDocument("__collection__", DEFAULT_CONTEXT)); } - - /** - * Describes the system variables available in MongoDB aggregation framework pipeline expressions. - * - * @author Thomas Darimont - * @author Christoph Strobl - * @see Aggregation Variables. - */ - enum SystemVariable { - - ROOT, CURRENT, REMOVE; - - private static final String PREFIX = "$$"; - - /** - * Return {@literal true} if the given {@code fieldRef} denotes a well-known system variable, {@literal false} - * otherwise. - * - * @param fieldRef may be {@literal null}. - * @return - */ - public static boolean isReferingToSystemVariable(@Nullable String fieldRef) { - - if (fieldRef == null || !fieldRef.startsWith(PREFIX) || fieldRef.length() <= 2) { - return false; - } - - int indexOfFirstDot = fieldRef.indexOf('.'); - String candidate = fieldRef.substring(2, indexOfFirstDot == -1 ? 
fieldRef.length() : indexOfFirstDot); - - for (SystemVariable value : values()) { - if (value.name().equals(candidate)) { - return true; - } - } - - return false; - } - - /* - * (non-Javadoc) - * @see java.lang.Enum#toString() - */ - @Override - public String toString() { - return PREFIX.concat(name()); - } - } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java index 17f8422c19..1cb38ef362 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpression.java @@ -1,11 +1,11 @@ /* - * Copyright 2015-2018 the original author or authors. + * Copyright 2015-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,23 +16,58 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; +import org.springframework.data.mongodb.MongoExpression; /** * An {@link AggregationExpression} can be used with field expressions in aggregation pipeline stages like * {@code project} and {@code group}. + *

          + * The {@link AggregationExpression expressions} {@link #toDocument(AggregationOperationContext)} method is called during + * the mapping process to obtain the mapped, ready to use representation that can be handed over to the driver as part + * of an {@link AggregationOperation pipeline stage}. * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl */ -public interface AggregationExpression { +public interface AggregationExpression extends MongoExpression { + + /** + * Create an {@link AggregationExpression} out of a given {@link MongoExpression} to ensure the resulting + * {@link MongoExpression#toDocument() Document} is mapped against the {@link AggregationOperationContext}.
          + * If the given expression is already an {@link AggregationExpression} the very same instance is returned. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. + * @since 3.2 + */ + static AggregationExpression from(MongoExpression expression) { + + if (expression instanceof AggregationExpression aggregationExpression) { + return aggregationExpression; + } + + return context -> context.getMappedObject(expression.toDocument()); + } + + /** + * Obtain the as is (unmapped) representation of the {@link AggregationExpression}. Use + * {@link #toDocument(AggregationOperationContext)} with a matching {@link AggregationOperationContext context} to + * engage domain type mapping including field name resolution. + * + * @see org.springframework.data.mongodb.MongoExpression#toDocument() + */ + @Override + default Document toDocument() { + return toDocument(Aggregation.DEFAULT_CONTEXT); + } /** * Turns the {@link AggregationExpression} into a {@link Document} within the given * {@link AggregationOperationContext}. * - * @param context - * @return + * @param context must not be {@literal null}. + * @return the MongoDB native ({@link Document}) form of the expression. */ Document toDocument(AggregationOperationContext context); } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java new file mode 100644 index 0000000000..1ae935a92b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionCriteria.java @@ -0,0 +1,58 @@ +/* + * Copyright 2023-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.springframework.data.mongodb.core.aggregation.EvaluationOperators.Expr; +import org.springframework.data.mongodb.core.query.CriteriaDefinition; + +/** + * A {@link CriteriaDefinition criteria} to use {@code $expr} within a + * {@link org.springframework.data.mongodb.core.query.Query}. + * + * @author Christoph Strobl + * @since 4.1 + */ +public class AggregationExpressionCriteria implements CriteriaDefinition { + + private final AggregationExpression expression; + + AggregationExpressionCriteria(AggregationExpression expression) { + this.expression = expression; + } + + /** + * @param expression must not be {@literal null}. + * @return new instance of {@link AggregationExpressionCriteria}. 
+ */ + public static AggregationExpressionCriteria whereExpr(AggregationExpression expression) { + return new AggregationExpressionCriteria(expression); + } + + @Override + public Document getCriteriaObject() { + + if (expression instanceof Expr expr) { + return new Document(getKey(), expr.get(0)); + } + return new Document(getKey(), expression); + } + + @Override + public String getKey() { + return "$expr"; + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java index 9775e1c0b9..e33c565d11 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationExpressionTransformer.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -16,12 +16,12 @@ package org.springframework.data.mongodb.core.aggregation; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.AggregationExpressionTransformer.AggregationExpressionTransformationContext; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; import org.springframework.data.mongodb.core.spel.ExpressionNode; import org.springframework.data.mongodb.core.spel.ExpressionTransformationContextSupport; import org.springframework.data.mongodb.core.spel.ExpressionTransformer; -import org.springframework.lang.Nullable; import org.springframework.util.Assert; /** @@ -53,21 +53,21 @@ class AggregationExpressionTransformationContext * @param currentNode must not be {@literal null}. * @param parentNode may be {@literal null}. * @param previousOperationObject may be {@literal null}. - * @param aggregationContext must not be {@literal null}. + * @param context must not be {@literal null}. */ public AggregationExpressionTransformationContext(T currentNode, @Nullable ExpressionNode parentNode, @Nullable Document previousOperationObject, AggregationOperationContext context) { super(currentNode, parentNode, previousOperationObject); - Assert.notNull(context, "AggregationOperationContext must not be null!"); + Assert.notNull(context, "AggregationOperationContext must not be null"); this.aggregationContext = context; } /** * Returns the underlying {@link AggregationOperationContext}. * - * @return + * @return never {@literal null}. 
*/ public AggregationOperationContext getAggregationContext() { return aggregationContext; @@ -76,7 +76,7 @@ public AggregationOperationContext getAggregationContext() { /** * Returns the {@link FieldReference} for the current {@link ExpressionNode}. * - * @return + * @return never {@literal null}. */ public FieldReference getFieldReference() { return aggregationContext.getReference(getCurrentNode().getName()); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java deleted file mode 100644 index f688b14725..0000000000 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationFunctionExpressions.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2015-2018 the original author or authors. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.springframework.data.mongodb.core.aggregation; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; - -import org.bson.Document; -import org.springframework.util.Assert; - -/** - * An enum of supported {@link AggregationExpression}s in aggregation pipeline stages. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @author Christoph Strobl - * @author Mark Paluch - * @since 1.7 - * @deprecated since 1.10. 
Please use {@link ArithmeticOperators} and {@link ComparisonOperators} instead. - */ -@Deprecated -public enum AggregationFunctionExpressions { - - SIZE, CMP, EQ, GT, GTE, LT, LTE, NE, SUBTRACT, ADD, MULTIPLY; - - /** - * Returns an {@link AggregationExpression} build from the current {@link Enum} name and the given parameters. - * - * @param parameters must not be {@literal null} - * @return - */ - public AggregationExpression of(Object... parameters) { - - Assert.notNull(parameters, "Parameters must not be null!"); - return new FunctionExpression(name().toLowerCase(), parameters); - } - - /** - * An {@link AggregationExpression} representing a function call. - * - * @author Thomas Darimont - * @author Oliver Gierke - * @since 1.7 - */ - static class FunctionExpression implements AggregationExpression { - - private final String name; - private final List values; - - /** - * Creates a new {@link FunctionExpression} for the given name and values. - * - * @param name must not be {@literal null} or empty. - * @param values must not be {@literal null}. 
- */ - public FunctionExpression(String name, Object[] values) { - - Assert.hasText(name, "Name must not be null!"); - Assert.notNull(values, "Values must not be null!"); - - this.name = name; - this.values = Arrays.asList(values); - } - - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.Expression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ - @Override - public Document toDocument(AggregationOperationContext context) { - - List args = new ArrayList(values.size()); - - for (Object value : values) { - args.add(unpack(value, context)); - } - - return new Document("$" + name, args); - } - - private static Object unpack(Object value, AggregationOperationContext context) { - - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); - } - - if (value instanceof Field) { - return context.getReference((Field) value).toString(); - } - - return value; - } - } -} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java index c1938533f0..923a1e73cf 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,9 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.util.Collections; +import java.util.List; + import org.bson.Document; /** @@ -32,7 +35,34 @@ public interface AggregationOperation { * Turns the {@link AggregationOperation} into a {@link Document} by using the given * {@link AggregationOperationContext}. * + * @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}. * @return the Document + * @deprecated since 2.2 in favor of {@link #toPipelineStages(AggregationOperationContext)}. */ + @Deprecated Document toDocument(AggregationOperationContext context); + + /** + * Turns the {@link AggregationOperation} into list of {@link Document stages} by using the given + * {@link AggregationOperationContext}. This allows a single {@link AggregationOptions} to add additional stages for + * eg. {@code $sort} or {@code $limit}. + * + * @param context the {@link AggregationOperationContext} to operate within. Must not be {@literal null}. + * @return the pipeline stages to run through. Never {@literal null}. + * @since 2.2 + */ + default List toPipelineStages(AggregationOperationContext context) { + return Collections.singletonList(toDocument(context)); + } + + /** + * Return the MongoDB operator that is used for this {@link AggregationOperation}. Aggregation operations should + * implement this method to avoid document rendering. + * + * @return the operator used for this {@link AggregationOperation}. 
+ * @since 3.0.2 + */ + default String getOperator() { + return toDocument(Aggregation.DEFAULT_CONTEXT).keySet().iterator().next(); + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java index 8f2285b3de..5027328461 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationContext.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,17 +15,30 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.beans.PropertyDescriptor; +import java.lang.reflect.Method; +import java.util.Arrays; + import org.bson.Document; +import org.bson.codecs.configuration.CodecRegistry; +import org.jspecify.annotations.Nullable; +import org.springframework.beans.BeanUtils; +import org.springframework.data.mongodb.CodecRegistryProvider; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; +import org.springframework.util.Assert; +import org.springframework.util.ReflectionUtils; + +import com.mongodb.MongoClientSettings; /** * The context for an {@link AggregationOperation}. 
* * @author Oliver Gierke * @author Christoph Strobl + * @author Mark Paluch * @since 1.3 */ -public interface AggregationOperationContext { +public interface AggregationOperationContext extends CodecRegistryProvider { /** * Returns the mapped {@link Document}, potentially converting the source considering mapping metadata etc. @@ -33,23 +46,115 @@ public interface AggregationOperationContext { * @param document will never be {@literal null}. * @return must not be {@literal null}. */ - Document getMappedObject(Document document); + default Document getMappedObject(Document document) { + return getMappedObject(document, null); + } + + /** + * Returns the mapped {@link Document}, potentially converting the source considering mapping metadata for the given + * type. + * + * @param document will never be {@literal null}. + * @param type can be {@literal null}. + * @return must not be {@literal null}. + * @since 2.2 + */ + Document getMappedObject(Document document, @Nullable Class type); /** - * Returns a {@link FieldReference} for the given field or {@literal null} if the context does not expose the given - * field. + * Returns a {@link FieldReference} for the given field. * * @param field must not be {@literal null}. - * @return + * @return the {@link FieldReference} for the given {@link Field}. + * @throws IllegalArgumentException if the context does not expose a field with the given name */ FieldReference getReference(Field field); /** - * Returns the {@link FieldReference} for the field with the given name or {@literal null} if the context does not - * expose a field with the given name. + * Returns the {@link FieldReference} for the field with the given name. * * @param name must not be {@literal null} or empty. - * @return + * @return the {@link FieldReference} for the field with given {@literal name}. 
+ * @throws IllegalArgumentException if the context does not expose a field with the given name */ FieldReference getReference(String name); + + /** + * Returns the {@link Fields} exposed by the type. May be a {@literal class} or an {@literal interface}. The default + * implementation uses {@link BeanUtils#getPropertyDescriptors(Class) property descriptors} discover fields from a + * {@link Class}. + * + * @param type must not be {@literal null}. + * @return never {@literal null}. + * @since 2.2 + * @see BeanUtils#getPropertyDescriptor(Class, String) + */ + default Fields getFields(Class type) { + + Assert.notNull(type, "Type must not be null"); + + return Fields.fields(Arrays.stream(BeanUtils.getPropertyDescriptors(type)) // + .filter(it -> { // object and default methods + Method method = it.getReadMethod(); + if (method == null) { + return false; + } + if (ReflectionUtils.isObjectMethod(method)) { + return false; + } + return !method.isDefault(); + }) // + .map(PropertyDescriptor::getName) // + .toArray(String[]::new)); + } + + /** + * Create a nested {@link AggregationOperationContext} from this context that exposes {@link ExposedFields fields}. + *

          + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext expose(ExposedFields fields) { + return new ExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + + /** + * Create a nested {@link AggregationOperationContext} from this context that inherits exposed fields from this + * context and exposes {@link ExposedFields fields}. + *

          + * Implementations of {@link AggregationOperationContext} retain their {@link FieldLookupPolicy}. If no policy is + * specified, then lookup defaults to {@link FieldLookupPolicy#strict()}. + * + * @param fields the fields to expose, must not be {@literal null}. + * @return the new {@link AggregationOperationContext} exposing {@code fields}. + * @since 4.3.1 + */ + default AggregationOperationContext inheritAndExpose(ExposedFields fields) { + return new InheritingExposedFieldsAggregationOperationContext(fields, this, FieldLookupPolicy.strict()); + } + + /** + * This toggle allows the {@link AggregationOperationContext context} to use any given field name without checking for + * its existence. Typically, the {@link AggregationOperationContext} fails when referencing unknown fields, those that + * are not present in one of the previous stages or the input source, throughout the pipeline. + * + * @return a more relaxed {@link AggregationOperationContext}. + * @since 3.0 + * @deprecated since 4.3.1, {@link FieldLookupPolicy} should be specified explicitly when creating the + * AggregationOperationContext. 
+ */ + @Deprecated(since = "4.3.1", forRemoval = true) + default AggregationOperationContext continueOnMissingFieldReference() { + return this; + } + + @Override + default CodecRegistry getCodecRegistry() { + return MongoClientSettings.getDefaultCodecRegistry(); + } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java index 0784af49fe..6437ec981d 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOperationRenderer.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,7 @@ import java.util.List; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.ExposedFields.DirectFieldReference; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ExposedFields.FieldReference; @@ -52,54 +53,76 @@ static List toDocument(List operations, Aggregat for (AggregationOperation operation : operations) { - operationDocuments.add(operation.toDocument(contextToUse)); + operationDocuments.addAll(operation.toPipelineStages(contextToUse)); - if (operation instanceof FieldsExposingAggregationOperation) { + if (operation instanceof FieldsExposingAggregationOperation exposedFieldsOperation) { - FieldsExposingAggregationOperation exposedFieldsOperation = (FieldsExposingAggregationOperation) operation; ExposedFields fields = exposedFieldsOperation.getFields(); if (operation instanceof InheritsFieldsAggregationOperation || exposedFieldsOperation.inheritsFields()) { - contextToUse = new InheritingExposedFieldsAggregationOperationContext(fields, contextToUse); + contextToUse = contextToUse.inheritAndExpose(fields); } else { - contextToUse = fields.exposesNoFields() ? DEFAULT_CONTEXT - : new ExposedFieldsAggregationOperationContext(exposedFieldsOperation.getFields(), contextToUse); + contextToUse = fields.exposesNoFields() ? 
ConverterAwareNoOpContext.instance(rootContext) + : contextToUse.expose(fields); } } + } return operationDocuments; } + private static class ConverterAwareNoOpContext implements AggregationOperationContext { + + AggregationOperationContext ctx; + + static ConverterAwareNoOpContext instance(AggregationOperationContext ctx) { + + if(ctx instanceof ConverterAwareNoOpContext noOpContext) { + return noOpContext; + } + + return new ConverterAwareNoOpContext(ctx); + } + + ConverterAwareNoOpContext(AggregationOperationContext ctx) { + this.ctx = ctx; + } + + @Override + public Document getMappedObject(Document document, @Nullable Class type) { + return ctx.getMappedObject(document, null); + } + + @Override + public FieldReference getReference(Field field) { + return new DirectFieldReference(new ExposedField(field, true)); + } + + @Override + public FieldReference getReference(String name) { + return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); + } + } + /** * Simple {@link AggregationOperationContext} that just returns {@link FieldReference}s as is. 
* * @author Oliver Gierke + * @author Christoph Strobl */ private static class NoOpAggregationOperationContext implements AggregationOperationContext { - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getMappedObject(org.bson.Document) - */ @Override - public Document getMappedObject(Document document) { + public Document getMappedObject(Document document, @Nullable Class type) { return document; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(org.springframework.data.mongodb.core.aggregation.ExposedFields.AvailableField) - */ @Override public FieldReference getReference(Field field) { return new DirectFieldReference(new ExposedField(field, true)); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperationContext#getReference(java.lang.String) - */ @Override public FieldReference getReference(String name) { return new DirectFieldReference(new ExposedField(new AggregationField(name), true)); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java index 29b30a25b1..278da408c6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationOptions.java @@ -1,11 +1,11 @@ /* - * Copyright 2014-2018 the original author or authors. + * Copyright 2014-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,40 +15,64 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.Duration; import java.util.Optional; import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.ReadConcernAware; +import org.springframework.data.mongodb.core.ReadPreferenceAware; import org.springframework.data.mongodb.core.query.Collation; -import org.springframework.lang.Nullable; +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.lang.Contract; import org.springframework.util.Assert; -import com.mongodb.DBObject; +import com.mongodb.ReadConcern; +import com.mongodb.ReadPreference; /** * Holds a set of configurable aggregation options that can be used within an aggregation pipeline. A list of support - * aggregation options can be found in the MongoDB reference documentation - * https://docs.mongodb.org/manual/reference/command/aggregate/#aggregate + * aggregation options can be found in the + * MongoDB reference documentation. + *

          + * As off 4.3 {@link #allowDiskUse} can be {@literal null}, indicating use of server default, and may only be applied if + * {@link #isAllowDiskUseSet() explicitly set}. For compatibility reasons {@link #isAllowDiskUse()} will remain + * returning {@literal false} if the no value has been set. * * @author Thomas Darimont * @author Oliver Gierke * @author Christoph Strobl * @author Mark Paluch + * @author Yadhukrishna S Pai + * @author Soumya Prakash Behera * @see Aggregation#withOptions(AggregationOptions) * @see TypedAggregation#withOptions(AggregationOptions) * @since 1.6 */ -public class AggregationOptions { +public class AggregationOptions implements ReadConcernAware, ReadPreferenceAware { private static final String BATCH_SIZE = "batchSize"; private static final String CURSOR = "cursor"; private static final String EXPLAIN = "explain"; private static final String ALLOW_DISK_USE = "allowDiskUse"; private static final String COLLATION = "collation"; + private static final String COMMENT = "comment"; + private static final String MAX_TIME = "maxTimeMS"; + private static final String HINT = "hint"; - private final boolean allowDiskUse; + private final Optional allowDiskUse; private final boolean explain; private final Optional cursor; private final Optional collation; + private final Optional comment; + private final Optional hint; + + private Optional readConcern; + + private Optional readPreference; + private Duration maxTime = Duration.ZERO; + private ResultOptions resultOptions = ResultOptions.READ; + private DomainTypeMapping domainTypeMapping = DomainTypeMapping.RELAXED; /** * Creates a new {@link AggregationOptions}. @@ -57,7 +81,7 @@ public class AggregationOptions { * @param explain whether to get the execution plan for the aggregation instead of the actual results. * @param cursor can be {@literal null}, used to pass additional options to the aggregation. 
*/ - public AggregationOptions(boolean allowDiskUse, boolean explain, Document cursor) { + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor) { this(allowDiskUse, explain, cursor, null); } @@ -73,11 +97,48 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, Document cursor */ public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor, @Nullable Collation collation) { + this(allowDiskUse, explain, cursor, collation, null, null); + } + + /** + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the + * aggregation. + * @param collation collation for string comparison. Can be {@literal null}. + * @param comment execution comment. Can be {@literal null}. + * @since 2.2 + */ + public AggregationOptions(boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation, @Nullable String comment) { + this(allowDiskUse, explain, cursor, collation, comment, null); + } - this.allowDiskUse = allowDiskUse; + /** + * Creates a new {@link AggregationOptions}. + * + * @param allowDiskUse whether to off-load intensive sort-operations to disk. + * @param explain whether to get the execution plan for the aggregation instead of the actual results. + * @param cursor can be {@literal null}, used to pass additional options (such as {@code batchSize}) to the + * aggregation. + * @param collation collation for string comparison. Can be {@literal null}. + * @param comment execution comment. Can be {@literal null}. + * @param hint can be {@literal null}, used to provide an index that would be forcibly used by query optimizer. 
+ * @since 3.1 + */ + private AggregationOptions(@Nullable Boolean allowDiskUse, boolean explain, @Nullable Document cursor, + @Nullable Collation collation, @Nullable String comment, @Nullable Object hint) { + + this.allowDiskUse = Optional.ofNullable(allowDiskUse); this.explain = explain; this.cursor = Optional.ofNullable(cursor); this.collation = Optional.ofNullable(collation); + this.comment = Optional.ofNullable(comment); + this.hint = Optional.ofNullable(hint); + this.readConcern = Optional.empty(); + this.readPreference = Optional.empty(); } /** @@ -93,7 +154,7 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, int cursorBatch } /** - * Creates new {@link AggregationOptions} given {@link DBObject} containing aggregation options. + * Creates new {@link AggregationOptions} given {@link Document} containing aggregation options. * * @param document must not be {@literal null}. * @return the {@link AggregationOptions}. @@ -101,15 +162,21 @@ public AggregationOptions(boolean allowDiskUse, boolean explain, int cursorBatch */ public static AggregationOptions fromDocument(Document document) { - Assert.notNull(document, "Document must not be null!"); + Assert.notNull(document, "Document must not be null"); - boolean allowDiskUse = document.getBoolean(ALLOW_DISK_USE, false); + Boolean allowDiskUse = document.get(ALLOW_DISK_USE, Boolean.class); boolean explain = document.getBoolean(EXPLAIN, false); Document cursor = document.get(CURSOR, Document.class); Collation collation = document.containsKey(COLLATION) ? 
Collation.from(document.get(COLLATION, Document.class)) : null; + String comment = document.getString(COMMENT); + Document hint = document.get(HINT, Document.class); - return new AggregationOptions(allowDiskUse, explain, cursor, collation); + AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint); + if (document.containsKey(MAX_TIME)) { + options.maxTime = Duration.ofMillis(document.getLong(MAX_TIME)); + } + return options; } /** @@ -123,19 +190,29 @@ public static Builder builder() { } /** - * Enables writing to temporary files. When set to true, aggregation stages can write data to the _tmp subdirectory in - * the dbPath directory. + * Enables writing to temporary files. When set to {@literal true}, aggregation stages can write data to the + * {@code _tmp} subdirectory in the {@code dbPath} directory. * - * @return + * @return {@literal true} if enabled; {@literal false} otherwise (or if not set). */ public boolean isAllowDiskUse() { - return allowDiskUse; + return allowDiskUse.orElse(false); + } + + /** + * Return whether {@link #isAllowDiskUse} is configured. + * + * @return {@literal true} if is {@code allowDiskUse} is configured, {@literal false} otherwise. + * @since 4.2.5 + */ + public boolean isAllowDiskUseSet() { + return allowDiskUse.isPresent(); } /** * Specifies to return the information on the processing of the pipeline. * - * @return + * @return {@literal true} if enabled. */ public boolean isExplain() { return explain; @@ -160,7 +237,7 @@ public Integer getCursorBatchSize() { /** * Specify a document that contains options that control the creation of the cursor object. * - * @return + * @return never {@literal null}. */ public Optional getCursor() { return cursor; @@ -169,13 +246,99 @@ public Optional getCursor() { /** * Get collation settings for string comparison. * - * @return + * @return never {@literal null}. 
* @since 2.0 */ public Optional getCollation() { return collation; } + /** + * Get the comment for the aggregation. + * + * @return never {@literal null}. + * @since 2.2 + */ + public Optional getComment() { + return comment; + } + + /** + * Get the hint used to fulfill the aggregation. + * + * @return never {@literal null}. + * @since 3.1 + * @deprecated since 4.1, use {@link #getHintObject()} instead. + */ + public Optional getHint() { + return hint.map(it -> { + if (it instanceof Document doc) { + return doc; + } + if (it instanceof String hintString) { + if (BsonUtils.isJsonDocument(hintString)) { + return BsonUtils.parse(hintString, null); + } + } + throw new IllegalStateException("Unable to read hint of type %s".formatted(it.getClass())); + }); + } + + /** + * Get the hint used to fulfill the aggregation. + * + * @return never {@literal null}. + * @since 4.1 + */ + public Optional getHintObject() { + return hint; + } + + @Override + public boolean hasReadConcern() { + return readConcern.isPresent(); + } + + @Override + public @Nullable ReadConcern getReadConcern() { + return readConcern.orElse(null); + } + + @Override + public boolean hasReadPreference() { + return readPreference.isPresent(); + } + + @Override + public @Nullable ReadPreference getReadPreference() { + return readPreference.orElse(null); + } + + /** + * @return the time limit for processing. {@link Duration#ZERO} is used for the default unbounded behavior. + * @since 3.0 + */ + public Duration getMaxTime() { + return maxTime; + } + + /** + * @return {@literal true} to skip results when running an aggregation. Useful in combination with {@code $merge} or + * {@code $out}. + * @since 3.0.2 + */ + public boolean isSkipResults() { + return ResultOptions.SKIP.equals(resultOptions); + } + + /** + * @return the domain type mapping strategy do apply. Never {@literal null}. 
+ * @since 3.2 + */ + public DomainTypeMapping getDomainTypeMapping() { + return domainTypeMapping; + } + /** * Returns a new potentially adjusted copy for the given {@code aggregationCommandObject} with the configuration * applied. @@ -187,14 +350,18 @@ Document applyAndReturnPotentiallyChangedCommand(Document command) { Document result = new Document(command); - if (allowDiskUse && !result.containsKey(ALLOW_DISK_USE)) { - result.put(ALLOW_DISK_USE, allowDiskUse); + if (isAllowDiskUseSet() && !result.containsKey(ALLOW_DISK_USE)) { + result.put(ALLOW_DISK_USE, isAllowDiskUse()); } if (explain && !result.containsKey(EXPLAIN)) { result.put(EXPLAIN, explain); } + if (result.containsKey(HINT)) { + hint.ifPresent(val -> result.append(HINT, val)); + } + if (!result.containsKey(CURSOR)) { cursor.ifPresent(val -> result.put(CURSOR, val)); } @@ -203,29 +370,46 @@ Document applyAndReturnPotentiallyChangedCommand(Document command) { collation.map(Collation::toDocument).ifPresent(val -> result.append(COLLATION, val)); } + if (hasExecutionTimeLimit() && !result.containsKey(MAX_TIME)) { + result.append(MAX_TIME, maxTime.toMillis()); + } + return result; } /** * Returns a {@link Document} representation of this {@link AggregationOptions}. * - * @return + * @return never {@literal null}. 
*/ public Document toDocument() { Document document = new Document(); - document.put(ALLOW_DISK_USE, allowDiskUse); + if (isAllowDiskUseSet()) { + document.put(ALLOW_DISK_USE, isAllowDiskUse()); + } document.put(EXPLAIN, explain); cursor.ifPresent(val -> document.put(CURSOR, val)); collation.ifPresent(val -> document.append(COLLATION, val.toDocument())); + comment.ifPresent(val -> document.append(COMMENT, val)); + hint.ifPresent(val -> document.append(HINT, val)); + + if (hasExecutionTimeLimit()) { + document.append(MAX_TIME, maxTime.toMillis()); + } return document; } - /* (non-Javadoc) - * @see java.lang.Object#toString() + /** + * @return {@literal true} if {@link #maxTime} is set to a positive value. + * @since 3.0 */ + public boolean hasExecutionTimeLimit() { + return !maxTime.isZero() && !maxTime.isNegative(); + } + @Override public String toString() { return toDocument().toJson(); @@ -243,17 +427,25 @@ static Document createCursor(int cursorBatchSize) { */ public static class Builder { - private boolean allowDiskUse; + private @Nullable Boolean allowDiskUse; private boolean explain; private @Nullable Document cursor; private @Nullable Collation collation; + private @Nullable String comment; + private @Nullable Object hint; + private @Nullable ReadConcern readConcern; + private @Nullable ReadPreference readPreference; + private @Nullable Duration maxTime; + private @Nullable ResultOptions resultOptions; + private @Nullable DomainTypeMapping domainTypeMapping; /** * Defines whether to off-load intensive sort-operations to disk. * - * @param allowDiskUse - * @return + * @param allowDiskUse use {@literal true} to allow disk use during the aggregation. + * @return this. */ + @Contract("_ -> this") public Builder allowDiskUse(boolean allowDiskUse) { this.allowDiskUse = allowDiskUse; @@ -263,9 +455,10 @@ public Builder allowDiskUse(boolean allowDiskUse) { /** * Defines whether to get the execution plan for the aggregation instead of the actual results. 
* - * @param explain - * @return + * @param explain use {@literal true} to enable explain feature. + * @return this. */ + @Contract("_ -> this") public Builder explain(boolean explain) { this.explain = explain; @@ -275,9 +468,10 @@ public Builder explain(boolean explain) { /** * Additional options to the aggregation. * - * @param cursor - * @return + * @param cursor must not be {@literal null}. + * @return this. */ + @Contract("_ -> this") public Builder cursor(Document cursor) { this.cursor = cursor; @@ -287,10 +481,11 @@ public Builder cursor(Document cursor) { /** * Define the initial cursor batch size. * - * @param batchSize - * @return + * @param batchSize use a positive int. + * @return this. * @since 2.0 */ + @Contract("_ -> this") public Builder cursorBatchSize(int batchSize) { this.cursor = createCursor(batchSize); @@ -301,21 +496,221 @@ public Builder cursorBatchSize(int batchSize) { * Define collation settings for string comparison. * * @param collation can be {@literal null}. - * @return + * @return this. + * @since 2.0 */ + @Contract("_ -> this") public Builder collation(@Nullable Collation collation) { this.collation = collation; return this; } + /** + * Define a comment to describe the execution. + * + * @param comment can be {@literal null}. + * @return this. + * @since 2.2 + */ + @Contract("_ -> this") + public Builder comment(@Nullable String comment) { + + this.comment = comment; + return this; + } + + /** + * Define a hint that is used by query optimizer to to fulfill the aggregation. + * + * @param hint can be {@literal null}. + * @return this. + * @since 3.1 + */ + @Contract("_ -> this") + public Builder hint(@Nullable Document hint) { + + this.hint = hint; + return this; + } + + /** + * Define a hint that is used by query optimizer to to fulfill the aggregation. + * + * @param indexName can be {@literal null}. + * @return this. 
+ * @since 4.1 + */ + @Contract("_ -> this") + public Builder hint(@Nullable String indexName) { + + this.hint = indexName; + return this; + } + + /** + * Define a {@link ReadConcern} to apply to the aggregation. + * + * @param readConcern can be {@literal null}. + * @return this. + * @since 4.1 + */ + @Contract("_ -> this") + public Builder readConcern(@Nullable ReadConcern readConcern) { + + this.readConcern = readConcern; + return this; + } + + /** + * Define a {@link ReadPreference} to apply to the aggregation. + * + * @param readPreference can be {@literal null}. + * @return this. + * @since 4.1 + */ + @Contract("_ -> this") + public Builder readPreference(@Nullable ReadPreference readPreference) { + + this.readPreference = readPreference; + return this; + } + + /** + * Set the time limit for processing. + * + * @param maxTime {@link Duration#ZERO} is used for the default unbounded behavior. {@link Duration#isNegative() + * Negative} values will be ignored. + * @return this. + * @since 3.0 + */ + @Contract("_ -> this") + public Builder maxTime(@Nullable Duration maxTime) { + + this.maxTime = maxTime; + return this; + } + + /** + * Run the aggregation, but do NOT read the aggregation result from the store.
          + * If the expected result of the aggregation is rather large, eg. when using an {@literal $out} operation, this + * option allows to execute the aggregation without having the cursor return the operation result. + * + * @return this. + * @since 3.0.2 + */ + @Contract("-> this") + public Builder skipOutput() { + + this.resultOptions = ResultOptions.SKIP; + return this; + } + + /** + * Apply a strict domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations throwing errors for non-existent, but referenced fields. + * + * @return this. + * @since 3.2 + */ + @Contract("-> this") + public Builder strictMapping() { + + this.domainTypeMapping = DomainTypeMapping.STRICT; + return this; + } + + /** + * Apply a relaxed domain type mapping considering {@link org.springframework.data.mongodb.core.mapping.Field} + * annotations using the user provided name if a referenced field does not exist. + * + * @return this. + * @since 3.2 + */ + @Contract("-> this") + public Builder relaxedMapping() { + + this.domainTypeMapping = DomainTypeMapping.RELAXED; + return this; + } + + /** + * Apply no domain type mapping at all taking the pipeline as-is. + * + * @return this. + * @since 3.2 + */ + @Contract("-> this") + public Builder noMapping() { + + this.domainTypeMapping = DomainTypeMapping.NONE; + return this; + } + /** * Returns a new {@link AggregationOptions} instance with the given configuration. * - * @return + * @return new instance of {@link AggregationOptions}. 
*/ + @Contract("-> new") public AggregationOptions build() { - return new AggregationOptions(allowDiskUse, explain, cursor, collation); + + AggregationOptions options = new AggregationOptions(allowDiskUse, explain, cursor, collation, comment, hint); + if (maxTime != null) { + options.maxTime = maxTime; + } + if (resultOptions != null) { + options.resultOptions = resultOptions; + } + if (domainTypeMapping != null) { + options.domainTypeMapping = domainTypeMapping; + } + if (readConcern != null) { + options.readConcern = Optional.of(readConcern); + } + if (readPreference != null) { + options.readPreference = Optional.of(readPreference); + } + + return options; } } + + /** + * @since 3.0 + */ + private enum ResultOptions { + + /** + * Just do it!, and do not read the operation result. + */ + SKIP, + /** + * Read the aggregation result from the cursor. + */ + READ + } + + /** + * Aggregation pipeline Domain type mappings supported by the mapping layer. + * + * @since 3.2 + */ + public enum DomainTypeMapping { + + /** + * Mapping throws errors for non-existent, but referenced fields. + */ + STRICT, + + /** + * Fields that do not exist in the model are treated as-is. + */ + RELAXED, + + /** + * Do not attempt to map fields against the model and treat the entire pipeline as-is. + */ + NONE + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java new file mode 100644 index 0000000000..f06803997b --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationPipeline.java @@ -0,0 +1,179 @@ +/* + * Copyright 2020-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Predicate; + +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.lang.Contract; +import org.springframework.util.Assert; +import org.springframework.util.CollectionUtils; + +/** + * The {@link AggregationPipeline} holds the collection of {@link AggregationOperation aggregation stages}. + * + * @author Christoph Strobl + * @author Mark Paluch + * @since 3.0.2 + */ +public class AggregationPipeline { + + private final List pipeline; + + public static AggregationPipeline of(AggregationOperation... stages) { + return new AggregationPipeline(Arrays.asList(stages)); + } + + /** + * Create an empty pipeline + */ + public AggregationPipeline() { + this(new ArrayList<>()); + } + + /** + * Create a new pipeline with given {@link AggregationOperation stages}. + * + * @param aggregationOperations must not be {@literal null}. + */ + public AggregationPipeline(List aggregationOperations) { + + Assert.notNull(aggregationOperations, "AggregationOperations must not be null"); + pipeline = new ArrayList<>(aggregationOperations); + } + + /** + * Append the given {@link AggregationOperation stage} to the pipeline. + * + * @param aggregationOperation must not be {@literal null}. + * @return this. 
+ */ + @Contract("_ -> this") + public AggregationPipeline add(AggregationOperation aggregationOperation) { + + Assert.notNull(aggregationOperation, "AggregationOperation must not be null"); + + pipeline.add(aggregationOperation); + return this; + } + + /** + * Get the list of {@link AggregationOperation aggregation stages}. + * + * @return never {@literal null}. + */ + public List getOperations() { + return Collections.unmodifiableList(pipeline); + } + + public @Nullable AggregationOperation firstOperation() { + return CollectionUtils.firstElement(pipeline); + } + + public @Nullable AggregationOperation lastOperation() { + return CollectionUtils.lastElement(pipeline); + } + + List toDocuments(AggregationOperationContext context) { + + verify(); + return AggregationOperationRenderer.toDocument(pipeline, context); + } + + /** + * @return {@literal true} if the last aggregation stage is either {@literal $out} or {@literal $merge}. + */ + public boolean isOutOrMerge() { + + if (isEmpty()) { + return false; + } + + AggregationOperation operation = lastOperation(); + return operation != null && (isOut(operation) || isMerge(operation)); + } + + void verify() { + + // check $out/$merge is the last operation if it exists + for (AggregationOperation operation : pipeline) { + + if (isOut(operation) && !isLast(operation)) { + throw new IllegalArgumentException("The $out operator must be the last stage in the pipeline"); + } + + if (isMerge(operation) && !isLast(operation)) { + throw new IllegalArgumentException("The $merge operator must be the last stage in the pipeline"); + } + } + } + + /** + * Return whether this aggregation pipeline defines a {@code $unionWith} stage that may contribute documents from + * other collections. Checking for presence of union stages is useful when attempting to determine the aggregation + * element type for mapping metadata computation. + * + * @return {@literal true} the aggregation pipeline makes use of {@code $unionWith}. 
+ * @since 3.1 + */ + public boolean containsUnionWith() { + return containsOperation(AggregationPipeline::isUnionWith); + } + + /** + * @return {@literal true} if the pipeline does not contain any stages. + * @since 3.1 + */ + public boolean isEmpty() { + return pipeline.isEmpty(); + } + + private boolean containsOperation(Predicate predicate) { + + if (isEmpty()) { + return false; + } + + for (AggregationOperation element : pipeline) { + if (predicate.test(element)) { + return true; + } + } + + return false; + } + + private boolean isLast(AggregationOperation aggregationOperation) { + return pipeline.indexOf(aggregationOperation) == pipeline.size() - 1; + } + + private static boolean isUnionWith(AggregationOperation operator) { + return operator instanceof UnionWithOperation || operator.getOperator().equals("$unionWith"); + } + + private static boolean isMerge(AggregationOperation operator) { + return operator instanceof MergeOperation || operator.getOperator().equals("$merge"); + } + + private static boolean isOut(AggregationOperation operator) { + return operator instanceof OutOperation || operator.getOperator().equals("$out"); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java index dcd1a62a29..7b27739229 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationResults.java @@ -1,11 +1,11 @@ /* - * Copyright 2013-2018 the original author or authors. + * Copyright 2013-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,7 +20,7 @@ import java.util.List; import org.bson.Document; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; import org.springframework.util.Assert; /** @@ -48,8 +48,8 @@ public class AggregationResults implements Iterable { */ public AggregationResults(List mappedResults, Document rawResults) { - Assert.notNull(mappedResults, "List of mapped results must not be null!"); - Assert.notNull(rawResults, "Raw results must not be null!"); + Assert.notNull(mappedResults, "List of mapped results must not be null"); + Assert.notNull(rawResults, "Raw results must not be null"); this.mappedResults = Collections.unmodifiableList(mappedResults); this.rawResults = rawResults; @@ -59,7 +59,7 @@ public AggregationResults(List mappedResults, Document rawResults) { /** * Returns the aggregation results. * - * @return + * @return the list of already mapped results or an empty one if none found. */ public List getMappedResults() { return mappedResults; @@ -68,19 +68,14 @@ public List getMappedResults() { /** * Returns the unique mapped result. Assumes no result or exactly one. * - * @return + * @return the single already mapped result object or raise an error if more than one found. * @throws IllegalArgumentException in case more than one result is available. */ - @Nullable - public T getUniqueMappedResult() { - Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one!"); + public @Nullable T getUniqueMappedResult() { + Assert.isTrue(mappedResults.size() < 2, "Expected unique result or null, but got more than one"); return mappedResults.size() == 1 ? 
mappedResults.get(0) : null; } - /* - * (non-Javadoc) - * @see java.lang.Iterable#iterator() - */ public Iterator iterator() { return mappedResults.iterator(); } @@ -88,7 +83,7 @@ public Iterator iterator() { /** * Returns the server that has been used to perform the aggregation. * - * @return + * @return can be {@literal null}. */ @Nullable public String getServerUsed() { @@ -98,17 +93,17 @@ public String getServerUsed() { /** * Returns the raw result that was returned by the server. * - * @return + * @return the raw response. * @since 1.6 */ public Document getRawResults() { return rawResults; } - @Nullable - private String parseServerUsed() { + private @Nullable String parseServerUsed() { Object object = rawResults.get("serverUsed"); - return object instanceof String ? (String) object : null; + return object instanceof String stringValue ? stringValue : null; } + } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java index c1803cc484..c5b53ef0c6 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationSpELExpression.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,19 +20,19 @@ /** * An {@link AggregationExpression} that renders a MongoDB Aggregation Framework expression from the AST of a - * SpEL + * SpEL * expression.
          *
          * Samples:
          - * *
          + * 
            * // { $and: [ { $gt: [ "$qty", 100 ] }, { $lt: [ "$qty", 250 ] } ] }
            * expressionOf("qty > 100 && qty < 250);
            *
            * // { $cond : { if : { $gte : [ "$a", 42 ]}, then : "answer", else : "no-answer" } }
            * expressionOf("cond(a >= 42, 'answer', 'no-answer')");
          - * 
          * + * * * @author Christoph Strobl * @author Mark Paluch @@ -56,19 +56,18 @@ private AggregationSpELExpression(String rawExpression, Object[] parameters) { * * @param expressionString must not be {@literal null}. * @param parameters can be empty. - * @return + * @return new instance of {@link AggregationSpELExpression}. */ public static AggregationSpELExpression expressionOf(String expressionString, Object... parameters) { - Assert.notNull(expressionString, "ExpressionString must not be null!"); + Assert.notNull(expressionString, "ExpressionString must not be null"); return new AggregationSpELExpression(expressionString, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return (Document) TRANSFORMER.transform(rawExpression, context, parameters); + + Document doc = (Document) TRANSFORMER.transform(rawExpression, context, parameters); + return doc != null ? doc : new Document(); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java new file mode 100644 index 0000000000..9e8564c03e --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUpdate.java @@ -0,0 +1,307 @@ +/* + * Copyright 2019-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.StringJoiner; +import java.util.stream.Collectors; + +import org.bson.Document; +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.query.Query; +import org.springframework.data.mongodb.core.query.SerializationUtils; +import org.springframework.data.mongodb.core.query.UpdateDefinition; +import org.springframework.lang.Contract; +import org.springframework.util.Assert; + +/** + * Abstraction for {@code db.collection.update()} using an aggregation pipeline. Aggregation pipeline updates use a more + * expressive update statement expressing conditional updates based on current field values or updating one field using + * the value of another field(s). + * + *
          + * AggregationUpdate update = AggregationUpdate.update().set("average")
          + * 		.toValue(ArithmeticOperators.valueOf("tests").avg()).set("grade")
          + * 		.toValue(ConditionalOperators
          + * 				.switchCases(CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(90)).then("A"),
          + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(80)).then("B"),
          + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(70)).then("C"),
          + * 						CaseOperator.when(Gte.valueOf("average").greaterThanEqualToValue(60)).then("D"))
          + * 				.defaultTo("F"));
          + * 
          + * + * The above sample is equivalent to the JSON update statement: + * + *
          + * db.collection.update(
          + *    { },
          + *    [
          + *      { $set: { average : { $avg: "$tests" } } },
          + *      { $set: { grade: { $switch: {
          + *                            branches: [
          + *                                { case: { $gte: [ "$average", 90 ] }, then: "A" },
          + *                                { case: { $gte: [ "$average", 80 ] }, then: "B" },
          + *                                { case: { $gte: [ "$average", 70 ] }, then: "C" },
          + *                                { case: { $gte: [ "$average", 60 ] }, then: "D" }
          + *                            ],
          + *                            default: "F"
          + *      } } } }
          + *    ],
          + *    { multi: true }
          + * )
          + * 
          + * + * @author Christoph Strobl + * @author Mark Paluch + * @see MongoDB + * Reference Documentation + * @since 3.0 + */ +public class AggregationUpdate extends Aggregation implements UpdateDefinition { + + private boolean isolated = false; + private final Set keysTouched = new HashSet<>(); + + /** + * Create new {@link AggregationUpdate}. + */ + protected AggregationUpdate() { + this(new ArrayList<>()); + } + + /** + * Create new {@link AggregationUpdate} with the given aggregation pipeline to apply. + * + * @param pipeline must not be {@literal null}. + */ + protected AggregationUpdate(List pipeline) { + + super(pipeline); + + for (AggregationOperation operation : pipeline) { + if (operation instanceof FieldsExposingAggregationOperation exposingAggregationOperation) { + exposingAggregationOperation.getFields().forEach(it -> keysTouched.add(it.getName())); + } + } + } + + /** + * Start defining the update pipeline to execute. + * + * @return new instance of {@link AggregationUpdate}. + */ + public static AggregationUpdate update() { + return new AggregationUpdate(); + } + + /** + * Create a new AggregationUpdate from the given {@link AggregationOperation}s. + * + * @return new instance of {@link AggregationUpdate}. + */ + public static AggregationUpdate from(List pipeline) { + return new AggregationUpdate(pipeline); + } + + /** + * Adds new fields to documents. {@code $set} outputs documents that contain all existing fields from the input + * documents and newly added fields. + * + * @param setOperation must not be {@literal null}. + * @return this. + * @see $set Aggregation Reference + */ + @Contract("_ -> this") + public AggregationUpdate set(SetOperation setOperation) { + + Assert.notNull(setOperation, "SetOperation must not be null"); + + setOperation.getFields().forEach(it -> { + keysTouched.add(it.getName()); + }); + pipeline.add(setOperation); + return this; + } + + /** + * {@code $unset} removes/excludes fields from documents. 
+ * + * @param unsetOperation must not be {@literal null}. + * @return this. + * @see $unset Aggregation + * Reference + */ + @Contract("_ -> this") + public AggregationUpdate unset(UnsetOperation unsetOperation) { + + Assert.notNull(unsetOperation, "UnsetOperation must not be null"); + + pipeline.add(unsetOperation); + keysTouched.addAll(unsetOperation.removedFieldNames()); + return this; + } + + /** + * {@code $replaceWith} replaces the input document with the specified document. The operation replaces all existing + * fields in the input document, including the _id field. + * + * @param replaceWithOperation must not be {@literal null}. + * @return this. + * @see $replaceWith Aggregation + * Reference + */ + @Contract("_ -> this") + public AggregationUpdate replaceWith(ReplaceWithOperation replaceWithOperation) { + + Assert.notNull(replaceWithOperation, "ReplaceWithOperation must not be null"); + pipeline.add(replaceWithOperation); + return this; + } + + /** + * {@code $replaceWith} replaces the input document with the value. + * + * @param value must not be {@literal null}. + * @return this. + */ + @Contract("_ -> this") + public AggregationUpdate replaceWith(Object value) { + + Assert.notNull(value, "Value must not be null"); + return replaceWith(ReplaceWithOperation.replaceWithValue(value)); + } + + /** + * Fluent API variant for {@code $set} adding a single {@link SetOperation pipeline operation} every time. To update + * multiple fields within one {@link SetOperation} use {@link #set(SetOperation)}. + * + * @param key must not be {@literal null}. + * @return new instance of {@link SetValueAppender}. 
+ * @see #set(SetOperation) + */ + @Contract("_ -> new") + public SetValueAppender set(String key) { + + Assert.notNull(key, "Key must not be null"); + + return new SetValueAppender() { + + @Override + public AggregationUpdate toValue(@Nullable Object value) { + return set(SetOperation.builder().set(key).toValue(value)); + } + + @Override + public AggregationUpdate toValueOf(Object value) { + + Assert.notNull(value, "Value must not be null"); + return set(SetOperation.builder().set(key).toValueOf(value)); + } + }; + } + + /** + * Short for {@link #unset(UnsetOperation)}. + * + * @param keys the fields to remove. + * @return this. + */ + @Contract("_ -> this") + public AggregationUpdate unset(String... keys) { + + Assert.notNull(keys, "Keys must not be null"); + Assert.noNullElements(keys, "Keys must not contain null elements"); + + return unset(new UnsetOperation(Arrays.stream(keys).map(Fields::field).collect(Collectors.toList()))); + } + + /** + * Prevents a write operation that affects multiple documents from yielding to other reads or writes + * once the first document is written.
          + * Use with {@link org.springframework.data.mongodb.core.MongoOperations#updateMulti(Query, UpdateDefinition, Class)}. + * + * @return never {@literal null}. + */ + @Contract("-> this") + public AggregationUpdate isolated() { + + isolated = true; + return this; + } + + @Override + public Boolean isIsolated() { + return isolated; + } + + @Override + public Document getUpdateObject() { + return new Document("", toPipeline(Aggregation.DEFAULT_CONTEXT)); + } + + @Override + public boolean modifies(String key) { + return keysTouched.contains(key); + } + + @Override + public void inc(String key) { + set(new SetOperation(key, ArithmeticOperators.valueOf(key).add(1))); + } + + @Override + public List getArrayFilters() { + return Collections.emptyList(); + } + + @Override + public String toString() { + + StringJoiner joiner = new StringJoiner(",\n", "[\n", "\n]"); + toPipeline(Aggregation.DEFAULT_CONTEXT).stream().map(SerializationUtils::serializeToJsonSafely) + .forEach(joiner::add); + return joiner.toString(); + } + + /** + * Fluent API AggregationUpdate builder. + * + * @author Christoph Strobl + */ + public interface SetValueAppender { + + /** + * Define the target value as is. + * + * @param value can be {@literal null}. + * @return never {@literal null}. + */ + AggregationUpdate toValue(@Nullable Object value); + + /** + * Define the target value as value, an {@link AggregationExpression} or a {@link Field} reference. + * + * @param value can be {@literal null}. + * @return never {@literal null}. 
+ */ + AggregationUpdate toValueOf(Object value); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java index 59927eb50b..e84f7ed1b0 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationUtils.java @@ -1,11 +1,11 @@ /* - * Copyright 2017-2018 the original author or authors. + * Copyright 2017-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -32,16 +32,16 @@ interface AggregationUtils { * Converts the given {@link Range} into an array of values. * * @param range must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ - public static List toRangeValues(Range range) { + static List toRangeValues(Range range) { - Assert.notNull(range, "Range must not be null!"); + Assert.notNull(range, "Range must not be null"); List result = new ArrayList(2); result.add(range.getLowerBound().getValue() - .orElseThrow(() -> new IllegalArgumentException("Lower bound of range must be bounded!"))); - range.getUpperBound().getValue().ifPresent(it -> result.add(it)); + .orElseThrow(() -> new IllegalArgumentException("Lower bound of range must be bounded"))); + range.getUpperBound().getValue().ifPresent(result::add); return result; } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java new file mode 100644 index 0000000000..522dd5eae5 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/AggregationVariable.java @@ -0,0 +1,133 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.jspecify.annotations.Nullable; +import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; + +/** + * A special field that points to a variable {@code $$} expression. 
+ * + * @author Christoph Strobl + * @since 4.1.3 + */ +public interface AggregationVariable extends Field { + + String PREFIX = "$$"; + + /** + * @return {@literal true} if the fields {@link #getName() name} does not match the defined {@link #getTarget() + * target}. + */ + @Override + default boolean isAliased() { + return !ObjectUtils.nullSafeEquals(getName(), getTarget()); + } + + @Override + default String getName() { + return getTarget(); + } + + @Override + default boolean isInternal() { + return false; + } + + /** + * Create a new {@link AggregationVariable} for the given name. + *

          + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable variable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + }; + } + + /** + * Create a new {@link #isInternal() local} {@link AggregationVariable} for the given name. + *

          + * Variables start with {@code $$}. If not, the given value gets prefixed with {@code $$}. + * + * @param value must not be {@literal null}. + * @return new instance of {@link AggregationVariable}. + * @throws IllegalArgumentException if given value is {@literal null}. + */ + static AggregationVariable localVariable(String value) { + + Assert.notNull(value, "Value must not be null"); + return new AggregationVariable() { + + private final String val = AggregationVariable.prefixVariable(value); + + @Override + public String getTarget() { + return val; + } + + @Override + public boolean isInternal() { + return true; + } + }; + } + + /** + * Check if the given field name reference may be variable. + * + * @param fieldRef can be {@literal null}. + * @return true if given value matches the variable identification pattern. + */ + static boolean isVariable(@Nullable String fieldRef) { + return fieldRef != null && fieldRef.stripLeading().matches("^\\$\\$\\w.*"); + } + + /** + * Check if the given field may be variable. + * + * @param field can be {@literal null}. + * @return true if given {@link Field field} is an {@link AggregationVariable} or if its value is a + * {@link #isVariable(String) variable}. + */ + static boolean isVariable(Field field) { + + if (field instanceof AggregationVariable) { + return true; + } + return isVariable(field.getTarget()); + } + + private static String prefixVariable(String variable) { + + var trimmed = variable.stripLeading(); + return trimmed.startsWith(PREFIX) ? 
trimmed : (PREFIX + trimmed); + } + +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java index 54f3c430b0..c7787b382c 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArithmeticOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,19 +17,34 @@ import java.util.Collections; import java.util.List; +import java.util.Locale; +import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Avg; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovariancePop; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.CovarianceSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Max; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Median; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Min; +import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Percentile; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevPop; import 
org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.StdDevSamp; import org.springframework.data.mongodb.core.aggregation.AccumulatorOperators.Sum; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnit; +import org.springframework.data.mongodb.core.aggregation.SetWindowFieldsOperation.WindowUnits; +import org.springframework.lang.Contract; import org.springframework.util.Assert; +import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * Gateway to {@literal Arithmetic} aggregation operations that perform math operations on numbers. * * @author Christoph Strobl + * @author Mark Paluch + * @author Mushtaq Ahmed + * @author Julia Lee * @since 1.10 */ public class ArithmeticOperators { @@ -38,7 +53,7 @@ public class ArithmeticOperators { * Take the field referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArithmeticOperatorFactory}. */ public static ArithmeticOperatorFactory valueOf(String fieldReference) { return new ArithmeticOperatorFactory(fieldReference); @@ -48,19 +63,30 @@ public static ArithmeticOperatorFactory valueOf(String fieldReference) { * Take the value resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ArithmeticOperatorFactory}. */ public static ArithmeticOperatorFactory valueOf(AggregationExpression expression) { return new ArithmeticOperatorFactory(expression); } + /** + * Creates new {@link AggregationExpression} that returns a random float between {@code 0} and {@code 1} each time it + * is called. + * + * @return new instance of {@link Rand}. 
+ * @since 3.3 + */ + public static Rand rand() { + return new Rand(); + } + /** * @author Christoph Strobl */ public static class ArithmeticOperatorFactory { - private final String fieldReference; - private final AggregationExpression expression; + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; /** * Creates new {@link ArithmeticOperatorFactory} for given {@literal fieldReference}. @@ -69,7 +95,7 @@ public static class ArithmeticOperatorFactory { */ public ArithmeticOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -81,7 +107,7 @@ public ArithmeticOperatorFactory(String fieldReference) { */ public ArithmeticOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -89,8 +115,9 @@ public ArithmeticOperatorFactory(AggregationExpression expression) { /** * Creates new {@link AggregationExpression} that returns the absolute value of the associated number. * - * @return + * @return new instance of {@link Abs}. */ + @SuppressWarnings("NullAway") public Abs abs() { return usesFieldRef() ? Abs.absoluteValueOf(fieldReference) : Abs.absoluteValueOf(expression); } @@ -100,11 +127,11 @@ public Abs abs() { * number. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Add}. 
*/ public Add add(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createAdd().add(fieldReference); } @@ -113,11 +140,11 @@ public Add add(String fieldReference) { * {@link AggregationExpression} to the associated number. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public Add add(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createAdd().add(expression); } @@ -125,38 +152,81 @@ public Add add(AggregationExpression expression) { * Creates new {@link AggregationExpression} that adds the given {@literal value} to the associated number. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public Add add(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createAdd().add(value); } + @SuppressWarnings("NullAway") private Add createAdd() { return usesFieldRef() ? Add.valueOf(fieldReference) : Add.valueOf(expression); } /** * Creates new {@link AggregationExpression} that returns the smallest integer greater than or equal to the - * assoicated number. + * associated number. * - * @return + * @return new instance of {@link Ceil}. */ + @SuppressWarnings("NullAway") public Ceil ceil() { return usesFieldRef() ? Ceil.ceilValueOf(fieldReference) : Ceil.ceilValueOf(expression); } /** - * Creates new {@link AggregationExpression} that ivides the associated number by number referenced via + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @return new instance of {@link Derivative}. 
+ * @since 3.3 + */ + public Derivative derivative() { + return derivative((String) null); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Derivative derivative(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return derivative(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the mathematical derivative value. + * + * @param unit The time unit ({@literal week, day, hour, minute, second, millisecond}) to apply can be + * {@literal null}. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Derivative derivative(@Nullable String unit) { + + Derivative derivative = usesFieldRef() ? Derivative.derivativeOf(fieldReference) + : Derivative.derivativeOf(expression); + return StringUtils.hasText(unit) ? derivative.unit(unit) : derivative; + } + + /** + * Creates new {@link AggregationExpression} that divides the associated number by number referenced via * {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. */ public Divide divideBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createDivide().divideBy(fieldReference); } @@ -165,26 +235,27 @@ public Divide divideBy(String fieldReference) { * {@literal expression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. 
*/ public Divide divideBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createDivide().divideBy(expression); } /** * Creates new {@link AggregationExpression} that divides the associated number by given {@literal value}. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return new instance of {@link Divide}. */ public Divide divideBy(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createDivide().divideBy(value); } + @SuppressWarnings("NullAway") private Divide createDivide() { return usesFieldRef() ? Divide.valueOf(fieldReference) : Divide.valueOf(expression); } @@ -192,8 +263,9 @@ private Divide createDivide() { /** * Creates new {@link AggregationExpression} that raises Euler’s number (i.e. e ) on the associated number. * - * @return + * @return new instance of {@link Exp}. */ + @SuppressWarnings("NullAway") public Exp exp() { return usesFieldRef() ? Exp.expValueOf(fieldReference) : Exp.expValueOf(expression); } @@ -202,18 +274,60 @@ public Exp exp() { * Creates new {@link AggregationExpression} that returns the largest integer less than or equal to the associated * number. * - * @return + * @return new instance of {@link Floor}. */ + @SuppressWarnings("NullAway") public Floor floor() { return usesFieldRef() ? Floor.floorValueOf(fieldReference) : Floor.floorValueOf(expression); } /** - * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the assoicated + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @return new instance of {@link Integral}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Integral integral() { + return usesFieldRef() ? 
Integral.integralOf(fieldReference) : Integral.integralOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit The time unit ({@link WindowUnits#WEEK}, {@link WindowUnits#DAY}, {@link WindowUnits#HOUR}, + * {@link WindowUnits#MINUTE}, {@link WindowUnits#SECOND}, {@link WindowUnits#MILLISECOND}) to apply. + * @return new instance of {@link Derivative}. + * @since 3.3 + */ + public Integral integral(WindowUnit unit) { + + Assert.notNull(unit, "Window unit must not be null"); + + return integral(unit.name().toLowerCase(Locale.ROOT)); + } + + /** + * Creates new {@link AggregationExpression} that calculates the approximation for the mathematical integral value. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + * @since 3.3 + */ + public Integral integral(String unit) { + + Assert.hasText(unit, "Unit must not be empty"); + + return integral().unit(unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the natural logarithm ln (i.e loge) of the associated * number. * - * @return + * @return new instance of {@link Ln}. */ + @SuppressWarnings("NullAway") public Ln ln() { return usesFieldRef() ? Ln.lnValueOf(fieldReference) : Ln.lnValueOf(expression); } @@ -223,11 +337,11 @@ public Ln ln() { * referenced via {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public Log log(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createLog().log(fieldReference); } @@ -236,12 +350,12 @@ public Log log(String fieldReference) { * extracted by given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Log}. 
*/ public Log log(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return createLog().log(fieldReference); + Assert.notNull(expression, "Expression must not be null"); + return createLog().log(expression); } /** @@ -249,14 +363,15 @@ public Log log(AggregationExpression expression) { * {@literal base}. * * @param base must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public Log log(Number base) { - Assert.notNull(base, "Base must not be null!"); + Assert.notNull(base, "Base must not be null"); return createLog().log(base); } + @SuppressWarnings("NullAway") private Log createLog() { return usesFieldRef() ? Log.valueOf(fieldReference) : Log.valueOf(expression); } @@ -264,8 +379,9 @@ private Log createLog() { /** * Creates new {@link AggregationExpression} that calculates the log base 10 for the associated number. * - * @return + * @return new instance of {@link Log10}. */ + @SuppressWarnings("NullAway") public Log10 log10() { return usesFieldRef() ? Log10.log10ValueOf(fieldReference) : Log10.log10ValueOf(expression); } @@ -275,11 +391,11 @@ public Log10 log10() { * remainder. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public Mod mod(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createMod().mod(fieldReference); } @@ -288,11 +404,11 @@ public Mod mod(String fieldReference) { * remainder. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public Mod mod(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createMod().mod(expression); } @@ -301,14 +417,15 @@ public Mod mod(AggregationExpression expression) { * remainder. 
* * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public Mod mod(Number value) { - Assert.notNull(value, "Base must not be null!"); + Assert.notNull(value, "Base must not be null"); return createMod().mod(value); } + @SuppressWarnings("NullAway") private Mod createMod() { return usesFieldRef() ? Mod.valueOf(fieldReference) : Mod.valueOf(expression); } @@ -317,11 +434,11 @@ private Mod createMod() { * Creates new {@link AggregationExpression} that multiplies the associated number with another. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public Multiply multiplyBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createMultiply().multiplyBy(fieldReference); } @@ -329,11 +446,11 @@ public Multiply multiplyBy(String fieldReference) { * Creates new {@link AggregationExpression} that multiplies the associated number with another. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public Multiply multiplyBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createMultiply().multiplyBy(expression); } @@ -341,14 +458,15 @@ public Multiply multiplyBy(AggregationExpression expression) { * Creates new {@link AggregationExpression} that multiplies the associated number with another. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public Multiply multiplyBy(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createMultiply().multiplyBy(value); } + @SuppressWarnings("NullAway") private Multiply createMultiply() { return usesFieldRef() ? 
Multiply.valueOf(fieldReference) : Multiply.valueOf(expression); } @@ -357,11 +475,11 @@ private Multiply createMultiply() { * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Pow}. */ public Pow pow(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createPow().pow(fieldReference); } @@ -369,11 +487,11 @@ public Pow pow(String fieldReference) { * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Pow}. */ public Pow pow(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createPow().pow(expression); } @@ -381,14 +499,15 @@ public Pow pow(AggregationExpression expression) { * Creates new {@link AggregationExpression} that raises the associated number to the specified exponent. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Pow}. */ public Pow pow(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createPow().pow(value); } + @SuppressWarnings("NullAway") private Pow createPow() { return usesFieldRef() ? Pow.valueOf(fieldReference) : Pow.valueOf(expression); } @@ -396,8 +515,9 @@ private Pow createPow() { /** * Creates new {@link AggregationExpression} that calculates the square root of the associated number. * - * @return + * @return new instance of {@link Sqrt}. */ + @SuppressWarnings("NullAway") public Sqrt sqrt() { return usesFieldRef() ? 
Sqrt.sqrtOf(fieldReference) : Sqrt.sqrtOf(expression); } @@ -406,11 +526,11 @@ public Sqrt sqrt() { * Creates new {@link AggregationExpression} that subtracts value of given from the associated number. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public Subtract subtract(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createSubtract().subtract(fieldReference); } @@ -418,26 +538,27 @@ public Subtract subtract(String fieldReference) { * Creates new {@link AggregationExpression} that subtracts value of given from the associated number. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public Subtract subtract(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createSubtract().subtract(expression); } /** * Creates new {@link AggregationExpression} that subtracts value from the associated number. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return new instance of {@link Subtract}. */ public Subtract subtract(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createSubtract().subtract(value); } + @SuppressWarnings("NullAway") private Subtract createSubtract() { return usesFieldRef() ? Subtract.valueOf(fieldReference) : Subtract.valueOf(expression); } @@ -445,8 +566,9 @@ private Subtract createSubtract() { /** * Creates new {@link AggregationExpression} that truncates a number to its integer. * - * @return + * @return new instance of {@link Trunc}. */ + @SuppressWarnings("NullAway") public Trunc trunc() { return usesFieldRef() ? 
Trunc.truncValueOf(fieldReference) : Trunc.truncValueOf(expression); } @@ -454,8 +576,9 @@ public Trunc trunc() { /** * Creates new {@link AggregationExpression} that calculates and returns the sum of numeric values. * - * @return + * @return new instance of {@link Sum}. */ + @SuppressWarnings("NullAway") public Sum sum() { return usesFieldRef() ? AccumulatorOperators.Sum.sumOf(fieldReference) : AccumulatorOperators.Sum.sumOf(expression); @@ -464,8 +587,9 @@ public Sum sum() { /** * Creates new {@link AggregationExpression} that returns the average value of the numeric values. * - * @return + * @return new instance of {@link Avg}. */ + @SuppressWarnings("NullAway") public Avg avg() { return usesFieldRef() ? AccumulatorOperators.Avg.avgOf(fieldReference) : AccumulatorOperators.Avg.avgOf(expression); @@ -474,8 +598,9 @@ public Avg avg() { /** * Creates new {@link AggregationExpression} that returns the maximum value. * - * @return + * @return new instance of {@link Max}. */ + @SuppressWarnings("NullAway") public Max max() { return usesFieldRef() ? AccumulatorOperators.Max.maxOf(fieldReference) : AccumulatorOperators.Max.maxOf(expression); @@ -484,8 +609,9 @@ public Max max() { /** * Creates new {@link AggregationExpression} that returns the minimum value. * - * @return + * @return new instance of {@link Min}. */ + @SuppressWarnings("NullAway") public Min min() { return usesFieldRef() ? AccumulatorOperators.Min.minOf(fieldReference) : AccumulatorOperators.Min.minOf(expression); @@ -494,8 +620,9 @@ public Min min() { /** * Creates new {@link AggregationExpression} that calculates the population standard deviation of the input values. * - * @return + * @return new instance of {@link StdDevPop}. */ + @SuppressWarnings("NullAway") public StdDevPop stdDevPop() { return usesFieldRef() ? 
AccumulatorOperators.StdDevPop.stdDevPopOf(fieldReference) : AccumulatorOperators.StdDevPop.stdDevPopOf(expression); @@ -504,13 +631,376 @@ public StdDevPop stdDevPop() { /** * Creates new {@link AggregationExpression} that calculates the sample standard deviation of the input values. * - * @return + * @return new instance of {@link StdDevSamp}. */ + @SuppressWarnings("NullAway") public StdDevSamp stdDevSamp() { return usesFieldRef() ? AccumulatorOperators.StdDevSamp.stdDevSampOf(fieldReference) : AccumulatorOperators.StdDevSamp.stdDevSampOf(expression); } + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the population covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(String fieldReference) { + return covariancePop().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the population covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovariancePop}. + * @since 3.3 + */ + public CovariancePop covariancePop(AggregationExpression expression) { + return covariancePop().and(expression); + } + + @SuppressWarnings("NullAway") + private CovariancePop covariancePop() { + return usesFieldRef() ? CovariancePop.covariancePopOf(fieldReference) : CovariancePop.covariancePopOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the value of the + * given field to calculate the sample covariance of the two. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. 
+ * @since 3.3 + */ + public CovarianceSamp covarianceSamp(String fieldReference) { + return covarianceSamp().and(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that uses the previous input (field/expression) and the result of the + * given {@link AggregationExpression expression} to calculate the sample covariance of the two. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link CovarianceSamp}. + * @since 3.3 + */ + public CovarianceSamp covarianceSamp(AggregationExpression expression) { + return covarianceSamp().and(expression); + } + + @SuppressWarnings("NullAway") + private CovarianceSamp covarianceSamp() { + return usesFieldRef() ? CovarianceSamp.covarianceSampOf(fieldReference) + : CovarianceSamp.covarianceSampOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that rounds a number to a whole integer or to a specified decimal + * place. + * + * @return new instance of {@link Round}. + * @since 3.0 + */ + @SuppressWarnings("NullAway") + public Round round() { + return usesFieldRef() ? Round.roundValueOf(fieldReference) : Round.roundValueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that rounds a number to a specified decimal place. + * + * @return new instance of {@link Round}. + * @since 3.0 + */ + public Round roundToPlace(int place) { + return round().place(place); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sin}. + * @since 3.3 + */ + public Sin sin() { + return sin(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Sin}. 
+ * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Sin sin(AngularUnit unit) { + return usesFieldRef() ? Sin.sinOf(fieldReference, unit) : Sin.sinOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + public Sinh sinh() { + return sinh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the sine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Sinh}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Sinh sinh(AngularUnit unit) { + return usesFieldRef() ? Sinh.sinhOf(fieldReference, unit) : Sinh.sinhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse sine of a numeric value. + * + * @return new instance of {@link ASin}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public ASin asin() { + return usesFieldRef() ? ASin.asinOf(fieldReference) : ASin.asinOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a numeric value. + * + * @return new instance of {@link ASinh}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public ASinh asinh() { + return usesFieldRef() ? ASinh.asinhOf(fieldReference) : ASinh.asinhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cos}. + * @since 3.3 + */ + public Cos cos() { + return cos(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the cosine of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Cos}. 
+ * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Cos cos(AngularUnit unit) { + return usesFieldRef() ? Cos.cosOf(fieldReference, unit) : Cos.cosOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + public Cosh cosh() { + return cosh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic cosine of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Cosh}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Cosh cosh(AngularUnit unit) { + return usesFieldRef() ? Cosh.coshOf(fieldReference, unit) : Cosh.coshOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse cosine of a numeric value. + * + * @return new instance of {@link ACos}. + * @since 3.4 + */ + @SuppressWarnings("NullAway") + public ACos acos() { + return usesFieldRef() ? ACos.acosOf(fieldReference) : ACos.acosOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a numeric value. + * + * @return new instance of {@link ACosh}. + * @since 3.4 + */ + @SuppressWarnings("NullAway") + public ACosh acosh() { + return usesFieldRef() ? ACosh.acoshOf(fieldReference) : ACosh.acoshOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tan}. + * @since 3.3 + */ + public Tan tan() { + return tan(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of a numeric value. + * + * @return new instance of {@link ATan}. 
+ * @since 3.3 + */ + @SuppressWarnings("NullAway") + public ATan atan() { + return usesFieldRef() ? ATan.atanOf(fieldReference) : ATan.atanOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given numeric value in the argument. + * + * @param value the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public ATan2 atan2(Number value) { + + Assert.notNull(value, "Value must not be null"); + return createATan2().atan2of(value); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given field reference in the argument. + * + * @param fieldReference the numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createATan2().atan2of(fieldReference); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse tangent of the numeric value divided by + * the given {@link AggregationExpression} in the argument. + * + * @param expression the expression evaluating to a numeric value + * @return new instance of {@link ATan2}. + * @since 3.3 + */ + public ATan2 atan2(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return createATan2().atan2of(expression); + } + + @SuppressWarnings("NullAway") + private ATan2 createATan2() { + return usesFieldRef() ? ATan2.valueOf(fieldReference) : ATan2.valueOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a numeric value. + * + * @return new instance of {@link ATanh}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public ATanh atanh() { + return usesFieldRef() ? 
ATanh.atanhOf(fieldReference) : ATanh.atanhOf(expression); + } + + /** + * Creates new {@link AggregationExpression} that calculates the tangent of a numeric value in the given + * {@link AngularUnit unit}. + * + * @param unit the unit of measure. + * @return new instance of {@link Tan}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Tan tan(AngularUnit unit) { + return usesFieldRef() ? Tan.tanOf(fieldReference, unit) : Tan.tanOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value given in + * {@link AngularUnit#RADIANS radians}. + * + * @return new instance of {@link Tanh}. + * @since 3.3 + */ + public Tanh tanh() { + return tanh(AngularUnit.RADIANS); + } + + /** + * Creates new {@link AggregationExpression} that calculates the hyperbolic tangent of a numeric value. + * + * @param unit the unit of measure. + * @return new instance of {@link Tanh}. + * @since 3.3 + */ + @SuppressWarnings("NullAway") + public Tanh tanh(AngularUnit unit) { + return usesFieldRef() ? Tanh.tanhOf(fieldReference, unit) : Tanh.tanhOf(expression, unit); + } + + /** + * Creates new {@link AggregationExpression} that calculates the requested percentile(s) of the numeric value. + * + * @return new instance of {@link Percentile}. + * @param percentages must not be {@literal null}. + * @since 4.2 + */ + @SuppressWarnings("NullAway") + public Percentile percentile(Double... percentages) { + Percentile percentile = usesFieldRef() ? AccumulatorOperators.Percentile.percentileOf(fieldReference) + : AccumulatorOperators.Percentile.percentileOf(expression); + return percentile.percentages(percentages); + } + + /** + * Creates new {@link AggregationExpression} that calculates the median of the numeric value. + * + * @return new instance of {@link Median}. + * @since 4.2 + */ + @SuppressWarnings("NullAway") + public Median median() { + return usesFieldRef() ? 
AccumulatorOperators.Median.medianOf(fieldReference) + : AccumulatorOperators.Median.medianOf(expression); + } + private boolean usesFieldRef() { return fieldReference != null; } @@ -536,11 +1026,11 @@ protected String getMongoMethod() { * Creates new {@link Abs}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Abs}. */ public static Abs absoluteValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Abs(Fields.field(fieldReference)); } @@ -548,11 +1038,11 @@ public static Abs absoluteValueOf(String fieldReference) { * Creates new {@link Abs}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Abs}. */ public static Abs absoluteValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Abs(expression); } @@ -560,11 +1050,11 @@ public static Abs absoluteValueOf(AggregationExpression expression) { * Creates new {@link Abs}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Abs}. */ public static Abs absoluteValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Abs(value); } } @@ -589,11 +1079,11 @@ protected String getMongoMethod() { * Creates new {@link Add}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public static Add valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Add(asFields(fieldReference)); } @@ -601,11 +1091,11 @@ public static Add valueOf(String fieldReference) { * Creates new {@link Add}. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public static Add valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Add(Collections.singletonList(expression)); } @@ -613,26 +1103,47 @@ public static Add valueOf(AggregationExpression expression) { * Creates new {@link Add}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Add}. */ public static Add valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Add(Collections.singletonList(value)); } + /** + * Add the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Add}. + */ + @Contract("_ -> new") public Add add(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Add(append(Fields.field(fieldReference))); } + /** + * Add the evaluation result of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Add}. + */ + @Contract("_ -> new") public Add add(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Add(append(expression)); } + /** + * Add the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Add}. + */ + @Contract("_ -> new") public Add add(Number value) { return new Add(append(value)); } @@ -658,11 +1169,11 @@ protected String getMongoMethod() { * Creates new {@link Ceil}. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link Ceil}. */ public static Ceil ceilValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ceil(Fields.field(fieldReference)); } @@ -670,11 +1181,11 @@ public static Ceil ceilValueOf(String fieldReference) { * Creates new {@link Ceil}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ceil}. */ public static Ceil ceilValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ceil(expression); } @@ -682,11 +1193,11 @@ public static Ceil ceilValueOf(AggregationExpression expression) { * Creates new {@link Ceil}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ceil}. */ public static Ceil ceilValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Ceil(value); } } @@ -711,11 +1222,11 @@ protected String getMongoMethod() { * Creates new {@link Divide}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. */ public static Divide valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Divide(asFields(fieldReference)); } @@ -723,11 +1234,11 @@ public static Divide valueOf(String fieldReference) { * Creates new {@link Divide}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. 
*/ public static Divide valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Divide(Collections.singletonList(expression)); } @@ -735,26 +1246,47 @@ public static Divide valueOf(AggregationExpression expression) { * Creates new {@link Divide}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Divide}. */ public static Divide valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Divide(Collections.singletonList(value)); } + /** + * Divide by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + @Contract("_ -> new") public Divide divideBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Divide(append(Fields.field(fieldReference))); } + /** + * Divide by the evaluation results of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + @Contract("_ -> new") public Divide divideBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Divide(append(expression)); } + /** + * Divide by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Divide}. + */ + @Contract("_ -> new") public Divide divideBy(Number value) { return new Divide(append(value)); } @@ -780,11 +1312,11 @@ protected String getMongoMethod() { * Creates new {@link Exp}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Exp}. 
*/ public static Exp expValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Exp(Fields.field(fieldReference)); } @@ -792,11 +1324,11 @@ public static Exp expValueOf(String fieldReference) { * Creates new {@link Exp}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Exp}. */ public static Exp expValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Exp(expression); } @@ -804,11 +1336,11 @@ public static Exp expValueOf(AggregationExpression expression) { * Creates new {@link Exp}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Exp}. */ public static Exp expValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Exp(value); } } @@ -833,11 +1365,11 @@ protected String getMongoMethod() { * Creates new {@link Floor}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Floor}. */ public static Floor floorValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Floor(Fields.field(fieldReference)); } @@ -845,11 +1377,11 @@ public static Floor floorValueOf(String fieldReference) { * Creates new {@link Floor}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Floor}. 
*/ public static Floor floorValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Floor(expression); } @@ -857,11 +1389,11 @@ public static Floor floorValueOf(AggregationExpression expression) { * Creates new {@link Floor}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Floor}. */ public static Floor floorValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Floor(value); } } @@ -886,11 +1418,11 @@ protected String getMongoMethod() { * Creates new {@link Ln}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ln}. */ public static Ln lnValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ln(Fields.field(fieldReference)); } @@ -898,11 +1430,11 @@ public static Ln lnValueOf(String fieldReference) { * Creates new {@link Ln}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ln}. */ public static Ln lnValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ln(expression); } @@ -910,11 +1442,11 @@ public static Ln lnValueOf(AggregationExpression expression) { * Creates new {@link Ln}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ln}. */ public static Ln lnValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Ln(value); } } @@ -939,11 +1471,11 @@ protected String getMongoMethod() { * Creates new {@link Min}. 
* * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public static Log valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log(asFields(fieldReference)); } @@ -951,11 +1483,11 @@ public static Log valueOf(String fieldReference) { * Creates new {@link Log}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Log}. */ public static Log valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log(Collections.singletonList(expression)); } @@ -967,22 +1499,43 @@ public static Log valueOf(AggregationExpression expression) { */ public static Log valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Log(Collections.singletonList(value)); } + /** + * Use the value stored at the given field as log base. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Log}. + */ + @Contract("_ -> new") public Log log(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log(append(Fields.field(fieldReference))); } + /** + * Use the evaluated value of the given {@link AggregationExpression} as log base. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Log}. + */ + @Contract("_ -> new") public Log log(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log(append(expression)); } + /** + * Use the given value as log base. 
+ * + * @param base must not be {@literal null}. + * @return new instance of {@link Log}. + */ + @Contract("_ -> new") public Log log(Number base) { return new Log(append(base)); } @@ -1008,11 +1561,11 @@ protected String getMongoMethod() { * Creates new {@link Log10}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Log10}. */ public static Log10 log10ValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Log10(Fields.field(fieldReference)); } @@ -1020,11 +1573,11 @@ public static Log10 log10ValueOf(String fieldReference) { * Creates new {@link Log10}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Log10}. */ public static Log10 log10ValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Log10(expression); } @@ -1032,11 +1585,11 @@ public static Log10 log10ValueOf(AggregationExpression expression) { * Creates new {@link Log10}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Log10}. */ public static Log10 log10ValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Log10(value); } } @@ -1061,11 +1614,11 @@ protected String getMongoMethod() { * Creates new {@link Mod}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. 
*/ public static Mod valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Mod(asFields(fieldReference)); } @@ -1073,11 +1626,11 @@ public static Mod valueOf(String fieldReference) { * Creates new {@link Mod}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public static Mod valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Mod(Collections.singletonList(expression)); } @@ -1085,26 +1638,47 @@ public static Mod valueOf(AggregationExpression expression) { * Creates new {@link Mod}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Mod}. */ public static Mod valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Mod(Collections.singletonList(value)); } + /** + * Use the value stored at the given field as mod base. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + @Contract("_ -> new") public Mod mod(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Mod(append(Fields.field(fieldReference))); } + /** + * Use evaluated value of the given {@link AggregationExpression} as mod base. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + @Contract("_ -> new") public Mod mod(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Mod(append(expression)); } + /** + * Use the given value as mod base. 
+ * + * @param base must not be {@literal null}. + * @return new instance of {@link Mod}. + */ + @Contract("_ -> new") public Mod mod(Number base) { return new Mod(append(base)); } @@ -1130,11 +1704,11 @@ protected String getMongoMethod() { * Creates new {@link Multiply}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public static Multiply valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Multiply(asFields(fieldReference)); } @@ -1142,11 +1716,11 @@ public static Multiply valueOf(String fieldReference) { * Creates new {@link Multiply}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public static Multiply valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Multiply(Collections.singletonList(expression)); } @@ -1154,26 +1728,47 @@ public static Multiply valueOf(AggregationExpression expression) { * Creates new {@link Multiply}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Multiply}. */ public static Multiply valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Multiply(Collections.singletonList(value)); } + /** + * Multiply by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Multiply}. 
+ */ + @Contract("_ -> new") public Multiply multiplyBy(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Multiply(append(Fields.field(fieldReference))); } + /** + * Multiply by the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + @Contract("_ -> new") public Multiply multiplyBy(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Multiply(append(expression)); } + /** + * Multiply by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Multiply}. + */ + @Contract("_ -> new") public Multiply multiplyBy(Number value) { return new Multiply(append(value)); } @@ -1203,7 +1798,7 @@ protected String getMongoMethod() { */ public static Pow valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Pow(asFields(fieldReference)); } @@ -1215,7 +1810,7 @@ public static Pow valueOf(String fieldReference) { */ public static Pow valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Pow(Collections.singletonList(expression)); } @@ -1227,22 +1822,43 @@ public static Pow valueOf(AggregationExpression expression) { */ public static Pow valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Pow(Collections.singletonList(value)); } + /** + * Pow by the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. 
+ * @return new instance of {@link Pow}. + */ + @Contract("_ -> new") public Pow pow(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Pow(append(Fields.field(fieldReference))); } + /** + * Pow by the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + @Contract("_ -> new") public Pow pow(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Pow(append(expression)); } + /** + * Pow by the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + @Contract("_ -> new") public Pow pow(Number value) { return new Pow(append(value)); } @@ -1268,11 +1884,11 @@ protected String getMongoMethod() { * Creates new {@link Sqrt}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Sqrt}. */ public static Sqrt sqrtOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Sqrt(Fields.field(fieldReference)); } @@ -1280,11 +1896,11 @@ public static Sqrt sqrtOf(String fieldReference) { * Creates new {@link Sqrt}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Sqrt}. */ public static Sqrt sqrtOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Sqrt(expression); } @@ -1292,11 +1908,11 @@ public static Sqrt sqrtOf(AggregationExpression expression) { * Creates new {@link Sqrt}. * * @param value must not be {@literal null}. 
- * @return + * @return new instance of {@link Sqrt}. */ public static Sqrt sqrtOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Sqrt(value); } } @@ -1321,11 +1937,11 @@ protected String getMongoMethod() { * Creates new {@link Subtract}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public static Subtract valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Subtract(asFields(fieldReference)); } @@ -1333,11 +1949,11 @@ public static Subtract valueOf(String fieldReference) { * Creates new {@link Subtract}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public static Subtract valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Subtract(Collections.singletonList(expression)); } @@ -1345,26 +1961,47 @@ public static Subtract valueOf(AggregationExpression expression) { * Creates new {@link Subtract}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Subtract}. */ public static Subtract valueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Subtract(Collections.singletonList(value)); } + /** + * Subtract the value stored at the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Pow}. 
+ */ + @Contract("_ -> new") public Subtract subtract(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Subtract(append(Fields.field(fieldReference))); } + /** + * Subtract the evaluated value of the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + @Contract("_ -> new") public Subtract subtract(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Subtract(append(expression)); } + /** + * Subtract the given value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Pow}. + */ + @Contract("_ -> new") public Subtract subtract(Number value) { return new Subtract(append(value)); } @@ -1390,11 +2027,11 @@ protected String getMongoMethod() { * Creates new {@link Trunc}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Trunc}. */ public static Trunc truncValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Trunc(Fields.field(fieldReference)); } @@ -1402,11 +2039,11 @@ public static Trunc truncValueOf(String fieldReference) { * Creates new {@link Trunc}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Trunc}. */ public static Trunc truncValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Trunc(expression); } @@ -1414,12 +2051,1220 @@ public static Trunc truncValueOf(AggregationExpression expression) { * Creates new {@link Trunc}. 
* * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Trunc}. */ public static Trunc truncValueOf(Number value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Trunc(value); } } + + /** + * {@link Round} rounds a number to a whole integer or to a specified decimal place. + *

+	 * <ul>
+	 * <li>If {@link Round#place(int)} resolves to a positive integer, {@code $round} rounds to the given decimal
+	 * places.</li>
+	 * <li>If {@link Round#place(int)} resolves to a negative integer, {@code $round} rounds to the left of the
+	 * decimal.</li>
+	 * <li>If {@link Round#place(int)} resolves to a zero, {@code $round} rounds using the first digit to the right of the
+	 * decimal.</li>
+	 * </ul>
          + * + * @since 3.0 + */ + public static class Round extends AbstractAggregationExpression { + + private Round(Object value) { + super(value); + } + + /** + * Round the value of the field that resolves to an integer, double, decimal, or long. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round roundValueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new Round(Collections.singletonList(Fields.field(fieldReference))); + } + + /** + * Round the outcome of the given expression hat resolves to an integer, double, decimal, or long. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round roundValueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Round(Collections.singletonList(expression)); + } + + /** + * Round the given numeric (integer, double, decimal, or long) value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Round}. + */ + public static Round round(Number value) { + + Assert.notNull(value, "Value must not be null"); + return new Round(Collections.singletonList(value)); + } + + /** + * The place to round to. Can be between -20 and 100, exclusive. + * + * @param place value between -20 and 100, exclusive. + * @return new instance of {@link Round}. + */ + @Contract("_ -> new") + public Round place(int place) { + return new Round(append(place)); + } + + /** + * The place to round to defined by an expression that resolves to an integer between -20 and 100, exclusive. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Round}. 
+ */ + @Contract("_ -> new") + public Round placeOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new Round(append(expression)); + } + + /** + * The place to round to defined by via a field reference that resolves to an integer between -20 and 100, + * exclusive. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Round}. + */ + @Contract("_ -> new") + public Round placeOf(String fieldReference) { + + Assert.notNull(fieldReference, "fieldReference must not be null"); + return new Round(append(Fields.field(fieldReference))); + } + + @Override + protected String getMongoMethod() { + return "$round"; + } + } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the average rate of change + * within the specified window. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Derivative extends AbstractAggregationExpression { + + private Derivative(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Derivative} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Derivative}. + */ + public static Derivative derivativeOf(String fieldReference) { + return new Derivative(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Derivative} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Derivative}. 
+ */ + public static Derivative derivativeOf(AggregationExpression expression) { + return new Derivative(Collections.singletonMap("input", expression)); + } + + public static Derivative derivativeOfValue(Number value) { + return new Derivative(Collections.singletonMap("input", value)); + } + + @Contract("_ -> new") + public Derivative unit(String unit) { + return new Derivative(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$derivative"; + } + } + + /** + * Value object to represent an {@link AggregationExpression expression} that calculates the approximation for the + * mathematical integral value. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Integral extends AbstractAggregationExpression { + + private Integral(Object value) { + super(value); + } + + /** + * Create a new instance of {@link Integral} for the value stored at the given field holding a numeric value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(String fieldReference) { + return new Integral(Collections.singletonMap("input", Fields.field(fieldReference))); + } + + /** + * Create a new instance of {@link Integral} for the value provided by the given expression that resolves to a + * numeric value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Integral}. + */ + public static Integral integralOf(AggregationExpression expression) { + return new Integral(Collections.singletonMap("input", expression)); + } + + /** + * Set the unit of measure. + * + * @param unit the unit of measure. + * @return new instance of {@link Integral}. + */ + @Contract("_ -> new") + public Integral unit(String unit) { + return new Integral(append("unit", unit)); + } + + @Override + protected String getMongoMethod() { + return "$integral"; + } + } + + /** + * The unit of measure for computations that operate upon angles. 
+ * + * @author Christoph Strobl + * @since 3.3 + */ + public enum AngularUnit { + RADIANS, DEGREES + } + + /** + * An {@link AggregationExpression expression} that calculates the sine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sin extends AbstractAggregationExpression { + + private Sin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS radians}.
          + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *
+	 * <pre class="code">
+	 * { $sin : { $degreesToRadians : "$angle" } }
+	 * </pre>
          + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference) { + return sinOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(String fieldReference, AngularUnit unit) { + return sin(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression) { + return sinOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sinOf(AggregationExpression expression, AngularUnit unit) { + return sin(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Sin}. 
+ */ + public static Sin sin(Object value) { + return sin(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the sine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sin sin(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sin(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sin(value); + } + + @Override + protected String getMongoMethod() { + return "$sin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Sinh extends AbstractAggregationExpression { + + private Sinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference) { + return sinhOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}.
          + * Use {@code sinhOf("angle", DEGREES)} as shortcut for + * + *
+	 * <pre class="code">
+	 * { $sinh : { $degreesToRadians : "$angle" } }
+	 * </pre>
          + * + * . + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(String fieldReference, AngularUnit unit) { + return sinh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}.
          + * Use {@code sinhOf("angle", DEGREES)} as shortcut for eg. + * {@code sinhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression) { + return sinhOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. + */ + public static Sinh sinhOf(AggregationExpression expression, AngularUnit unit) { + return sinh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Sin}. + */ + public static Sinh sinh(Object value) { + return sinh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic sine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Sin}. 
+ */ + public static Sinh sinh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Sinh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Sinh(value); + } + + @Override + protected String getMongoMethod() { + return "$sinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse sine of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASin extends AbstractAggregationExpression { + + private ASin(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ASin(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value.
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(AggregationExpression expression) { + return new ASin(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASin}. + */ + public static ASin asinOf(Number value) { + return new ASin(value); + } + + @Override + protected String getMongoMethod() { + return "$asin"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic sine of a value + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ASinh extends AbstractAggregationExpression { + + private ASinh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(String fieldReference) { + return new ASinh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value.
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(AggregationExpression expression) { + return new ASinh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic sine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ASinh}. + */ + public static ASinh asinhOf(Object value) { + return new ASinh(value); + } + + @Override + protected String getMongoMethod() { + return "$asinh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the cosine of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cos extends AbstractAggregationExpression { + + private Cos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS radians}.
          + * Use {@code cosOf("angle", DEGREES)} as shortcut for + * + *
+	 * <pre class="code">
+	 * { $cos : { $degreesToRadians : "$angle" } }
+	 * </pre>
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference) { + return cosOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(String fieldReference, AngularUnit unit) { + return cos(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression) { + return cosOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cosOf(AggregationExpression expression, AngularUnit unit) { + return cos(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Cos}. 
+ */ + public static Cos cos(Object value) { + return cos(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the cosine of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cos}. + */ + public static Cos cos(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cos(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cos(value); + } + + @Override + protected String getMongoMethod() { + return "$cos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Cosh extends AbstractAggregationExpression { + + private Cosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference) { + return coshOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}.
          + * Use {@code coshOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $cosh : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(String fieldReference, AngularUnit unit) { + return cosh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}.
          + * Use {@code coshOf("angle", DEGREES)} as shortcut for e.g. + * {@code coshOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression) { + return coshOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. + */ + public static Cosh coshOf(AggregationExpression expression, AngularUnit unit) { + return cosh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Cosh}. + */ + public static Cosh cosh(Object value) { + return cosh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic cosine of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Cosh}. 
+ */ + public static Cosh cosh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Cosh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Cosh(value); + } + + @Override + protected String getMongoMethod() { + return "$cosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACos extends AbstractAggregationExpression { + + private ACos(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ACos(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value.
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(AggregationExpression expression) { + return new ACos(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACos}. + */ + public static ACos acosOf(Number value) { + return new ACos(value); + } + + @Override + protected String getMongoMethod() { + return "$acos"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic cosine of a value. + * + * @author Divya Srivastava + * @since 3.4 + */ + public static class ACosh extends AbstractAggregationExpression { + + private ACosh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(String fieldReference) { + return new ACosh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value.
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(AggregationExpression expression) { + return new ACosh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic cosine of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ACosh}. + */ + public static ACosh acoshOf(Object value) { + return new ACosh(value); + } + + @Override + protected String getMongoMethod() { + return "$acosh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the tangent of a value that is measured in radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tan extends AbstractAggregationExpression { + + private Tan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS radians}.
          + * Use {@code tanOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $tan : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference) { + return tanOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(String fieldReference, AngularUnit unit) { + return tan(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression) { + return tanOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tanOf(AggregationExpression expression, AngularUnit unit) { + return tan(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @return new instance of {@link Tan}. 
+ */ + public static Tan tan(Object value) { + return tan(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the tangent of a value that is measured in the given + * {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tan}. + */ + public static Tan tan(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Tan(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tan(value); + } + + @Override + protected String getMongoMethod() { + return "$tan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of a value. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan extends AbstractAggregationExpression { + + private ATan(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(AggregationExpression expression) { + return new ATan(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of a value. 
+ * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATan}. + */ + public static ATan atanOf(Number value) { + return new ATan(value); + } + + @Override + protected String getMongoMethod() { + return "$atan"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse tangent of y / x, where y and x are the + * first and second values passed to the expression respectively. + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATan2 extends AbstractAggregationExpression { + + private ATan2(List value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(asFields(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATan2}. + */ + public static ATan2 valueOf(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ATan2((Collections.singletonList(expression))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of of y / x, where y and x are + * the first and second values passed to the expression respectively. 
 + * + * @param fieldReference anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves + * to a numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new ATan2(append(Fields.field(fieldReference))); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param expression anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to + * a numeric value. + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new ATan2(append(expression)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse tangent of y / x, where y and x are + * the first and second values passed to the expression respectively. + * + * @param value of type {@link Number} + * @return new instance of {@link ATan2}. + */ + public ATan2 atan2of(Number value) { + return new ATan2(append(value)); + } + + @Override + protected String getMongoMethod() { + return "$atan2"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class Tanh extends AbstractAggregationExpression { + + private Tanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanhOf(String fieldReference) { + return tanhOf(fieldReference, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}.
          + * Use {@code tanhOf("angle", DEGREES)} as shortcut for + * + *
          +		 * { $tanh : { $degreesToRadians : "$angle" } }
          +		 * 
          + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(String fieldReference, AngularUnit unit) { + return tanh(Fields.field(fieldReference), unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}.
          + * Use {@code tanhOf("angle", DEGREES)} as shortcut for e.g. + * {@code tanhOf(ConvertOperators.valueOf("angle").degreesToRadians())}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression) { + return tanhOf(expression, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanhOf(AggregationExpression expression, AngularUnit unit) { + return tanh(expression, unit); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * {@link AngularUnit#RADIANS}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link Tanh}. + */ + public static Tanh tanh(Object value) { + return tanh(value, AngularUnit.RADIANS); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the hyperbolic tangent of a value that is measured in + * the given {@link AngularUnit unit}. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value + * @param unit the unit of measure used by the value of the given field. + * @return new instance of {@link Tanh}. 
+ */ + public static Tanh tanh(Object value, AngularUnit unit) { + + if (ObjectUtils.nullSafeEquals(AngularUnit.DEGREES, unit)) { + return new Tanh(ConvertOperators.DegreesToRadians.degreesToRadians(value)); + } + return new Tanh(value); + } + + @Override + protected String getMongoMethod() { + return "$tanh"; + } + } + + /** + * An {@link AggregationExpression expression} that calculates the inverse hyperbolic tangent of a value + * + * @author Divya Srivastava + * @since 3.3 + */ + public static class ATanh extends AbstractAggregationExpression { + + private ATanh(Object value) { + super(value); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. + * + * @param fieldReference the name of the {@link Field field} that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(String fieldReference) { + return new ATanh(Fields.field(fieldReference)); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value.
          + * + * @param expression the {@link AggregationExpression expression} that resolves to a numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(AggregationExpression expression) { + return new ATanh(expression); + } + + /** + * Creates a new {@link AggregationExpression} that calculates the inverse hyperbolic tangent of a value. + * + * @param value anything ({@link Field field}, {@link AggregationExpression expression}, ...) that resolves to a + * numeric value. + * @return new instance of {@link ATanh}. + */ + public static ATanh atanhOf(Object value) { + return new ATanh(value); + } + + @Override + protected String getMongoMethod() { + return "$atanh"; + } + } + + /** + * {@link Rand} returns a floating value between 0 and 1. + * + * @author Mushtaq Ahmed + * @since 3.3 + */ + public static class Rand implements AggregationExpression { + + @Override + public Document toDocument(AggregationOperationContext context) { + return new Document("$rand", new Document()); + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java index 81adc4035c..85952d8f39 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ArrayOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,15 +17,19 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.domain.Range; +import org.springframework.data.domain.Sort; +import org.springframework.data.domain.Sort.Direction; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.AsBuilder; import org.springframework.data.mongodb.core.aggregation.ArrayOperators.Reduce.PropertyExpression; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; /** @@ -33,6 +37,8 @@ * * @author Christoph Strobl * @author Mark Paluch + * @author Shashank Sharma + * @author Divya Srivastava * @since 1.0 */ public class ArrayOperators { @@ -41,7 +47,7 @@ public class ArrayOperators { * Take the array referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArrayOperatorFactory}. */ public static ArrayOperatorFactory arrayOf(String fieldReference) { return new ArrayOperatorFactory(fieldReference); @@ -51,19 +57,31 @@ public static ArrayOperatorFactory arrayOf(String fieldReference) { * Take the array referenced resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ArrayOperatorFactory}. 
*/ public static ArrayOperatorFactory arrayOf(AggregationExpression expression) { return new ArrayOperatorFactory(expression); } + /** + * Take the given {@link Collection values} {@link AggregationExpression}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link ArrayOperatorFactory}. + * @since 2.2 + */ + public static ArrayOperatorFactory arrayOf(Collection values) { + return new ArrayOperatorFactory(values); + } + /** * @author Christoph Strobl */ public static class ArrayOperatorFactory { - private final String fieldReference; - private final AggregationExpression expression; + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; + private final @Nullable Collection values; /** * Creates new {@link ArrayOperatorFactory} for given {@literal fieldReference}. @@ -72,9 +90,10 @@ public static class ArrayOperatorFactory { */ public ArrayOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; + this.values = null; } /** @@ -84,17 +103,32 @@ public ArrayOperatorFactory(String fieldReference) { */ public ArrayOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; + this.values = null; + } + + /** + * Creates new {@link ArrayOperatorFactory} for given values. + * + * @param values must not be {@literal null}. 
+ * @since 2.2 + */ + public ArrayOperatorFactory(Collection values) { + + Assert.notNull(values, "Values must not be null"); + this.fieldReference = null; + this.expression = null; + this.values = values; } /** * Creates new {@link AggregationExpression} that takes the associated array and returns the element at the * specified array {@literal position}. * - * @param position - * @return + * @param position the element index. + * @return new instance of {@link ArrayElemAt}. */ public ArrayElemAt elementAt(int position) { return createArrayElemAt().elementAt(position); @@ -105,11 +139,11 @@ public ArrayElemAt elementAt(int position) { * resulting form the given {@literal expression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ArrayElemAt}. */ public ArrayElemAt elementAt(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createArrayElemAt().elementAt(expression); } @@ -118,16 +152,22 @@ public ArrayElemAt elementAt(AggregationExpression expression) { * defined by the referenced {@literal field}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArrayElemAt}. */ public ArrayElemAt elementAt(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createArrayElemAt().elementAt(fieldReference); } + @SuppressWarnings("NullAway") private ArrayElemAt createArrayElemAt() { - return usesFieldRef() ? ArrayElemAt.arrayOf(fieldReference) : ArrayElemAt.arrayOf(expression); + + if (usesFieldRef()) { + return ArrayElemAt.arrayOf(fieldReference); + } + + return usesExpression() ? ArrayElemAt.arrayOf(expression) : ArrayElemAt.arrayOf(values); } /** @@ -135,11 +175,11 @@ private ArrayElemAt createArrayElemAt() { * {@literal arrayFieldReference} to it. 
* * @param arrayFieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ConcatArrays}. */ public ConcatArrays concat(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null"); return createConcatArrays().concat(arrayFieldReference); } @@ -148,53 +188,86 @@ public ConcatArrays concat(String arrayFieldReference) { * the given {@literal expression} to it. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ConcatArrays}. */ public ConcatArrays concat(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createConcatArrays().concat(expression); } + @SuppressWarnings("NullAway") private ConcatArrays createConcatArrays() { - return usesFieldRef() ? ConcatArrays.arrayOf(fieldReference) : ConcatArrays.arrayOf(expression); + + if (usesFieldRef()) { + return ConcatArrays.arrayOf(fieldReference); + } + + return usesExpression() ? ConcatArrays.arrayOf(expression) : ConcatArrays.arrayOf(values); } /** * Creates new {@link AggregationExpression} that takes the associated array and selects a subset of the array to * return based on the specified condition. * - * @return + * @return new instance of {@link AsBuilder} to create a {@link Filter}. */ + @SuppressWarnings("NullAway") public AsBuilder filter() { - return Filter.filter(fieldReference); + + if (usesFieldRef()) { + return Filter.filter(fieldReference); + } + + if (usesExpression()) { + return Filter.filter(expression); + } + + Assert.state(values != null, "Values must not be null"); + return Filter.filter(new ArrayList<>(values)); } /** * Creates new {@link AggregationExpression} that takes the associated array and an check if its an array. * - * @return + * @return new instance of {@link IsArray}. 
*/ + @SuppressWarnings("NullAway") public IsArray isArray() { + + Assert.state(values == null, "Does it make sense to call isArray on an array; Maybe just skip it"); + return usesFieldRef() ? IsArray.isArray(fieldReference) : IsArray.isArray(expression); } /** * Creates new {@link AggregationExpression} that takes the associated array and retrieves its length. * - * @return + * @return new instance of {@link Size}. */ + @SuppressWarnings("NullAway") public Size length() { - return usesFieldRef() ? Size.lengthOfArray(fieldReference) : Size.lengthOfArray(expression); + + if (usesFieldRef()) { + return Size.lengthOfArray(fieldReference); + } + + return usesExpression() ? Size.lengthOfArray(expression) : Size.lengthOfArray(values); } /** * Creates new {@link AggregationExpression} that takes the associated array and selects a subset from it. * - * @return + * @return new instance of {@link Slice}. */ + @SuppressWarnings("NullAway") public Slice slice() { - return usesFieldRef() ? Slice.sliceArrayOf(fieldReference) : Slice.sliceArrayOf(expression); + + if (usesFieldRef()) { + return Slice.sliceArrayOf(fieldReference); + } + + return usesExpression() ? Slice.sliceArrayOf(expression) : Slice.sliceArrayOf(values); } /** @@ -202,20 +275,33 @@ public Slice slice() { * value and returns the array index (zero-based) of the first occurrence. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. */ + @SuppressWarnings("NullAway") public IndexOfArray indexOf(Object value) { - return usesFieldRef() ? IndexOfArray.arrayOf(fieldReference).indexOf(value) - : IndexOfArray.arrayOf(expression).indexOf(value); + + if (usesFieldRef()) { + return IndexOfArray.arrayOf(fieldReference).indexOf(value); + } + + return usesExpression() ? IndexOfArray.arrayOf(expression).indexOf(value) + : IndexOfArray.arrayOf(values).indexOf(value); } /** * Creates new {@link AggregationExpression} that returns an array with the elements in reverse order. 
* - * @return + * @return new instance of {@link ReverseArray}. */ + @SuppressWarnings("NullAway") public ReverseArray reverse() { - return usesFieldRef() ? ReverseArray.reverseArrayOf(fieldReference) : ReverseArray.reverseArrayOf(expression); + + if (usesFieldRef()) { + return ReverseArray.reverseArrayOf(fieldReference); + } + + return usesExpression() ? ReverseArray.reverseArrayOf(expression) + : ReverseArray.reverseArrayOf(Collections.singletonList(values)); } /** @@ -223,8 +309,9 @@ public ReverseArray reverse() { * an array and combines them into a single value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ReduceInitialValueBuilder} to create {@link Reduce}. */ + @SuppressWarnings("NullAway") public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(AggregationExpression expression) { return initialValue -> (usesFieldRef() ? Reduce.arrayOf(fieldReference) @@ -235,25 +322,66 @@ public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(AggregationExpressi * Start creating new {@link AggregationExpression} that applies an {@link AggregationExpression} to each element in * an array and combines them into a single value. * - * @param expressions - * @return + * @param expressions must not be {@literal null}. + * @return new instance of {@link ReduceInitialValueBuilder} to create {@link Reduce}. */ + @SuppressWarnings("NullAway") public ArrayOperatorFactory.ReduceInitialValueBuilder reduce(PropertyExpression... expressions) { return initialValue -> (usesFieldRef() ? Reduce.arrayOf(fieldReference) : Reduce.arrayOf(expression)) .withInitialValue(initialValue).reduce(expressions); } + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given {@link Sort + * order}. + * + * @return new instance of {@link SortArray}. 
+ * @since 4.0 + */ + @SuppressWarnings("NullAway") + public SortArray sort(Sort sort) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).by(sort); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).by(sort); + } + + /** + * Creates new {@link AggregationExpression} that takes the associated array and sorts it by the given + * {@link Direction order}. + * + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + @SuppressWarnings("NullAway") + public SortArray sort(Direction direction) { + + if (usesFieldRef()) { + return SortArray.sortArrayOf(fieldReference).direction(direction); + } + + return (usesExpression() ? SortArray.sortArrayOf(expression) : SortArray.sortArray(values)).direction(direction); + } + /** * Creates new {@link AggregationExpression} that transposes an array of input arrays so that the first element of * the output array would be an array containing, the first element of the first input array, the first element of * the second input array, etc. * * @param arrays must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ + @SuppressWarnings("NullAway") public Zip zipWith(Object... arrays) { - return (usesFieldRef() ? Zip.arrayOf(fieldReference) : Zip.arrayOf(expression)).zip(arrays); + + if (usesFieldRef()) { + return Zip.arrayOf(fieldReference).zip(arrays); + } + + return (usesExpression() ? Zip.arrayOf(expression) : Zip.arrayOf(values)).zip(arrays); } /** @@ -261,10 +389,67 @@ public Zip zipWith(Object... arrays) { * associated array. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link In}. */ + @SuppressWarnings("NullAway") public In containsValue(Object value) { - return (usesFieldRef() ? In.arrayOf(fieldReference) : In.arrayOf(expression)).containsValue(value); + + if (usesFieldRef()) { + return In.arrayOf(fieldReference).containsValue(value); + } + + return (usesExpression() ? 
In.arrayOf(expression) : In.arrayOf(values)).containsValue(value); + } + + /** + * Creates new {@link AggregationExpression} that converts the associated expression into an object. + * NOTE: Requires MongoDB 3.6 or later. + * + * @return new instance of {@link ArrayToObject}. + * @since 2.1 + */ + @SuppressWarnings("NullAway") + public ArrayToObject toObject() { + + if (usesFieldRef()) { + return ArrayToObject.arrayValueOfToObject(fieldReference); + } + + return usesExpression() ? ArrayToObject.arrayValueOfToObject(expression) : ArrayToObject.arrayToObject(values); + } + + /** + * Creates new {@link AggregationExpression} that return the first element in the associated array. + * NOTE: Requires MongoDB 4.4 or later. + * + * @return new instance of {@link First}. + * @since 3.4 + */ + @SuppressWarnings("NullAway") + public First first() { + + if (usesFieldRef()) { + return First.firstOf(fieldReference); + } + + return usesExpression() ? First.firstOf(expression) : First.first(values); + } + + /** + * Creates new {@link AggregationExpression} that return the last element in the given array. NOTE: + * Requires MongoDB 4.4 or later. + * + * @return new instance of {@link Last}. + * @since 3.4 + */ + @SuppressWarnings("NullAway") + public Last last() { + + if (usesFieldRef()) { + return Last.lastOf(fieldReference); + } + + return usesExpression() ? Last.lastOf(expression) : Last.last(values); } /** @@ -281,9 +466,20 @@ public interface ReduceInitialValueBuilder { Reduce startingWith(Object initialValue); } + /** + * @return {@literal true} if {@link #fieldReference} is not {@literal null}. + */ private boolean usesFieldRef() { return fieldReference != null; } + + /** + * @return {@literal true} if {@link #expression} is not {@literal null}. + * @since 2.2 + */ + private boolean usesExpression() { + return expression != null; + } } /** @@ -306,11 +502,11 @@ protected String getMongoMethod() { * Creates new {@link ArrayElemAt}. 
* * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ArrayElemAt}. */ public static ArrayElemAt arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ArrayElemAt(asFields(fieldReference)); } @@ -318,27 +514,61 @@ public static ArrayElemAt arrayOf(String fieldReference) { * Creates new {@link ArrayElemAt}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ArrayElemAt}. */ public static ArrayElemAt arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ArrayElemAt(Collections.singletonList(expression)); } + /** + * Creates new {@link ArrayElemAt}. + * + * @param values The array members. Must not be {@literal null}. + * @return new instance of {@link ArrayElemAt}. + * @since 2.2 + */ + public static ArrayElemAt arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new ArrayElemAt(Collections.singletonList(values)); + } + + /** + * Use the element with given index number. + * + * @param index the index number + * @return new instance of {@link ArrayElemAt}. + */ + @Contract("_ -> new") public ArrayElemAt elementAt(int index) { return new ArrayElemAt(append(index)); } + /** + * Use the element at the index number evaluated from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ArrayElemAt}. 
+ */ + @Contract("_ -> new") public ArrayElemAt elementAt(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ArrayElemAt(append(expression)); } + /** + * Use the element at the index number taken from the given field. + * + * @param arrayFieldReference the field name. + * @return new instance of {@link ArrayElemAt}. + */ + @Contract("_ -> new") public ArrayElemAt elementAt(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayReference must not be null"); return new ArrayElemAt(append(Fields.field(arrayFieldReference))); } } @@ -363,11 +593,11 @@ protected String getMongoMethod() { * Creates new {@link ConcatArrays}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ConcatArrays}. */ public static ConcatArrays arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ConcatArrays(asFields(fieldReference)); } @@ -375,23 +605,50 @@ public static ConcatArrays arrayOf(String fieldReference) { * Creates new {@link ConcatArrays}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ConcatArrays}. */ public static ConcatArrays arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ConcatArrays(Collections.singletonList(expression)); } + /** + * Creates new {@link ConcatArrays}. + * + * @param values The array members. Must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. 
+ * @since 2.2 + */ + public static ConcatArrays arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new ConcatArrays(Collections.singletonList(values)); + } + + /** + * Concat with the array stored at the given field. + * + * @param arrayFieldReference must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + */ + @Contract("_ -> new") public ConcatArrays concat(String arrayFieldReference) { - Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null!"); + Assert.notNull(arrayFieldReference, "ArrayFieldReference must not be null"); return new ConcatArrays(append(Fields.field(arrayFieldReference))); } + /** + * Concat with the array resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ConcatArrays}. + */ + @Contract("_ -> new") public ConcatArrays concat(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ConcatArrays(append(expression)); } } @@ -421,7 +678,7 @@ private Filter() { */ public static AsBuilder filter(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return filter(Fields.field(field)); } @@ -433,36 +690,47 @@ public static AsBuilder filter(String field) { */ public static AsBuilder filter(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new FilterExpressionBuilder().filter(field); } + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. + * @return never {@literal null}. 
+ * @since 4.2 + */ + public static AsBuilder filter(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + return new FilterExpressionBuilder().filter(expression); + } + /** * Set the {@literal values} to apply the {@code $filter} to. * * @param values must not be {@literal null}. - * @return + * @return new instance of {@link AsBuilder} to create the {@link Filter}. */ public static AsBuilder filter(List values) { - Assert.notNull(values, "Values must not be null!"); + Assert.notNull(values, "Values must not be null"); return new FilterExpressionBuilder().filter(values); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(final AggregationOperationContext context) { + + Assert.notNull(as, "As must be set first"); return toFilter(ExposedFields.from(as), context); } + @SuppressWarnings("NullAway") private Document toFilter(ExposedFields exposedFields, AggregationOperationContext context) { Document filterExpression = new Document(); - InheritingExposedFieldsAggregationOperationContext operationContext = new InheritingExposedFieldsAggregationOperationContext( - exposedFields, context); + AggregationOperationContext operationContext = context.inheritAndExpose(exposedFields); filterExpression.putAll(context.getMappedObject(new Document("input", getMappedInput(context)))); filterExpression.put("as", as.getTarget()); @@ -472,19 +740,28 @@ private Document toFilter(ExposedFields exposedFields, AggregationOperationConte return new Document("$filter", filterExpression); } - private Object getMappedInput(AggregationOperationContext context) { - return input instanceof Field ?
context.getReference((Field) input).toString() : input; + private @Nullable Object getMappedInput(AggregationOperationContext context) { + + if (input instanceof Field field) { + return context.getReference(field).toString(); + } + + if (input instanceof AggregationExpression expression) { + return expression.toDocument(context); + } + + return input; } - private Object getMappedCondition(AggregationOperationContext context) { + private @Nullable Object getMappedCondition(AggregationOperationContext context) { - if (!(condition instanceof AggregationExpression)) { + if (!(condition instanceof AggregationExpression aggregationExpression)) { return condition; } NestedDelegatingExpressionAggregationOperationContext nea = new NestedDelegatingExpressionAggregationOperationContext( - context); - return ((AggregationExpression) condition).toDocument(nea); + context, Collections.singleton(as)); + return aggregationExpression.toDocument(nea); } /** @@ -507,6 +784,15 @@ public interface InputBuilder { * @return */ AsBuilder filter(Field field); + + /** + * Set the {@link AggregationExpression} resolving to an array to apply the {@code $filter} to. + * + * @param expression must not be {@literal null}. + * @return + * @since 4.1.1 + */ + AsBuilder filter(AggregationExpression expression); } /** @@ -518,7 +804,7 @@ public interface AsBuilder { * Set the {@literal variableName} for the elements in the input array. * * @param variableName must not be {@literal null}. - * @return + * @return never {@literal null}. */ ConditionBuilder as(String variableName); } @@ -532,7 +818,7 @@ public interface ConditionBuilder { * Set the {@link AggregationExpression} that determines whether to include the element in the resulting array. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ Filter by(AggregationExpression expression); @@ -540,7 +826,7 @@ public interface ConditionBuilder { * Set the {@literal expression} that determines whether to include the element in the resulting array. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. */ Filter by(String expression); @@ -548,7 +834,7 @@ public interface ConditionBuilder { * Set the {@literal expression} that determines whether to include the element in the resulting array. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. */ Filter by(Document expression); } @@ -567,80 +853,68 @@ static final class FilterExpressionBuilder implements InputBuilder, AsBuilder, C /** * Creates new {@link InputBuilder}. * - * @return + * @return new instance of {@link FilterExpressionBuilder}. */ public static InputBuilder newBuilder() { return new FilterExpressionBuilder(); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.InputBuilder#filter(java.util.List) - */ @Override + @Contract("_ -> this") public AsBuilder filter(List array) { - Assert.notNull(array, "Array must not be null!"); - filter.input = new ArrayList(array); + Assert.notNull(array, "Array must not be null"); + filter.input = new ArrayList<>(array); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.InputBuilder#filter(org.springframework.data.mongodb.core.aggregation.Field) - */ @Override + @Contract("_ -> this") public AsBuilder filter(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); filter.input = field; return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.AsBuilder#as(java.lang.String) - */ @Override + @Contract("_ -> this") + public AsBuilder filter(AggregationExpression expression) { + + 
Assert.notNull(expression, "Expression must not be null"); + filter.input = expression; + return this; + } + + @Override + @Contract("_ -> this") public ConditionBuilder as(String variableName) { - Assert.notNull(variableName, "Variable name must not be null!"); + Assert.notNull(variableName, "Variable name must not be null"); filter.as = new ExposedField(variableName, true); return this; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public Filter by(AggregationExpression condition) { - Assert.notNull(condition, "Condition must not be null!"); + Assert.notNull(condition, "Condition must not be null"); filter.condition = condition; return filter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(java.lang.String) - */ @Override public Filter by(String expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); filter.condition = expression; return filter; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ArrayOperators.Filter.ConditionBuilder#by(org.bson.Document) - */ @Override public Filter by(Document expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); filter.condition = expression; return filter; } @@ -667,11 +941,11 @@ protected String getMongoMethod() { * Creates new {@link IsArray}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IsArray}. 
*/ public static IsArray isArray(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IsArray(Fields.field(fieldReference)); } @@ -679,11 +953,11 @@ public static IsArray isArray(String fieldReference) { * Creates new {@link IsArray}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IsArray}. */ public static IsArray isArray(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IsArray(expression); } } @@ -708,11 +982,11 @@ protected String getMongoMethod() { * Creates new {@link Size}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Size}. */ public static Size lengthOfArray(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Size(Fields.field(fieldReference)); } @@ -720,13 +994,26 @@ public static Size lengthOfArray(String fieldReference) { * Creates new {@link Size}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Size}. */ public static Size lengthOfArray(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Size(expression); } + + /** + * Creates new {@link Size}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Size}. + * @since 2.2 + */ + public static Size lengthOfArray(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new Size(Collections.singletonList(values)); + } } /** @@ -749,11 +1036,11 @@ protected String getMongoMethod() { * Creates new {@link Slice}. 
* * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Slice}. */ public static Slice sliceArrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Slice(asFields(fieldReference)); } @@ -761,41 +1048,103 @@ public static Slice sliceArrayOf(String fieldReference) { * Creates new {@link Slice}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Slice}. */ public static Slice sliceArrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Slice(Collections.singletonList(expression)); } - public Slice itemCount(int nrElements) { - return new Slice(append(nrElements)); + /** + * Creates new {@link Slice}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Slice}. + * @since 2.2 + */ + public static Slice sliceArrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new Slice(Collections.singletonList(values)); } - public SliceElementsBuilder offset(final int position) { + /** + * Slice the number of elements. + * + * @param count number of elements to slice. + * @return new instance of {@link Slice}. + */ + @Contract("_ -> new") + public Slice itemCount(int count) { + return new Slice(append(count)); + } + + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. + * @since 4.5 + */ + @Contract("_ -> new") + public Slice itemCount(AggregationExpression count) { + return new Slice(append(count)); + } - return new SliceElementsBuilder() { + /** + * Slice using offset and count. 
+ * + * @param position the start position + * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. + */ + public SliceElementsBuilder offset(int position) { + return new SliceElementsBuilder(position); + } - @Override - public Slice itemCount(int nrElements) { - return new Slice(append(position)).itemCount(nrElements); - } - }; + /** + * Slice using offset and count. + * + * @param position the start position + * @return new instance of {@link SliceElementsBuilder} to create {@link Slice}. + */ + public SliceElementsBuilder offset(AggregationExpression position) { + return new SliceElementsBuilder(position); } /** * @author Christoph Strobl */ - public interface SliceElementsBuilder { + public class SliceElementsBuilder { + + private final Object position; + + SliceElementsBuilder(Object position) { + this.position = position; + } /** - * Set the number of elements given {@literal nrElements}. + * Set the number of elements given {@literal count}. * - * @param nrElements - * @return + * @param count number of elements to slice. + * @return new instance of {@link Slice}. */ - Slice itemCount(int nrElements); + public Slice itemCount(int count) { + return new Slice(append(position)).itemCount(count); + } + + /** + * Slice the number of elements. + * + * @param count an {@link AggregationExpression} that evaluates to a numeric value used as number of elements to + * slice. + * @return new instance of {@link Slice}. + * @since 4.5 + */ + public Slice itemCount(AggregationExpression count) { + return new Slice(append(position)).itemCount(count); + } } } @@ -819,11 +1168,11 @@ protected String getMongoMethod() { * Start creating new {@link IndexOfArray}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. 
*/ public static IndexOfArrayBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IndexOfArrayBuilder(Fields.field(fieldReference)); } @@ -831,14 +1180,34 @@ public static IndexOfArrayBuilder arrayOf(String fieldReference) { * Start creating new {@link IndexOfArray}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. */ public static IndexOfArrayBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IndexOfArrayBuilder(expression); } + /** + * Start creating new {@link IndexOfArray}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link IndexOfArrayBuilder} to create {@link IndexOfArray}. + * @since 2.2 + */ + public static IndexOfArrayBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new IndexOfArrayBuilder(values); + } + + /** + * Lookup within a given range. + * + * @param range the lookup range. + * @return new instance of {@link IndexOfArray}. + */ + @Contract("_ -> new") public IndexOfArray within(Range range) { return new IndexOfArray(append(AggregationUtils.toRangeValues(range))); } @@ -858,11 +1227,11 @@ private IndexOfArrayBuilder(Object targetArray) { * Set the {@literal value} to check for its index in the array. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link IndexOfArray}. */ public IndexOfArray indexOf(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new IndexOfArray(Arrays.asList(targetArray, value)); } } @@ -888,7 +1257,7 @@ protected String getMongoMethod() { * Start creating new {@link RangeOperator}. 
* * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link RangeOperatorBuilder} to create {@link RangeOperator}. */ public static RangeOperatorBuilder rangeStartingAt(String fieldReference) { return new RangeOperatorBuilder(Fields.field(fieldReference)); @@ -898,7 +1267,7 @@ public static RangeOperatorBuilder rangeStartingAt(String fieldReference) { * Start creating new {@link RangeOperator}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link RangeOperatorBuilder} to create {@link RangeOperator}. */ public static RangeOperatorBuilder rangeStartingAt(AggregationExpression expression) { return new RangeOperatorBuilder(expression); @@ -908,12 +1277,13 @@ public static RangeOperatorBuilder rangeStartingAt(AggregationExpression express * Start creating new {@link RangeOperator}. * * @param value - * @return + * @return new instance of {@link RangeOperator}. */ public static RangeOperatorBuilder rangeStartingAt(long value) { return new RangeOperatorBuilder(value); } + @Contract("_ -> new") public RangeOperator withStepSize(long stepSize) { return new RangeOperator(append(stepSize)); } @@ -930,7 +1300,7 @@ private RangeOperatorBuilder(Object startPoint) { * Creates new {@link RangeOperator}. * * @param index - * @return + * @return new instance of {@link RangeOperator}. */ public RangeOperator to(long index) { return new RangeOperator(Arrays.asList(startPoint, index)); @@ -940,7 +1310,7 @@ public RangeOperator to(long index) { * Creates new {@link RangeOperator}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link RangeOperator}. */ public RangeOperator to(AggregationExpression expression) { return new RangeOperator(Arrays.asList(startPoint, expression)); @@ -950,7 +1320,7 @@ public RangeOperator to(AggregationExpression expression) { * Creates new {@link RangeOperator}. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link RangeOperator}. */ public RangeOperator to(String fieldReference) { return new RangeOperator(Arrays.asList(startPoint, Fields.field(fieldReference))); @@ -978,7 +1348,7 @@ protected String getMongoMethod() { * Creates new {@link ReverseArray} given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ReverseArray}. */ public static ReverseArray reverseArrayOf(String fieldReference) { return new ReverseArray(Fields.field(fieldReference)); @@ -988,11 +1358,22 @@ public static ReverseArray reverseArrayOf(String fieldReference) { * Creates new {@link ReverseArray} given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ReverseArray}. */ public static ReverseArray reverseArrayOf(AggregationExpression expression) { return new ReverseArray(expression); } + + /** + * Creates new {@link ReverseArray}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link ReverseArray}. 
+ * @since 2.2 + */ + public static ReverseArray reverseArrayOf(Collection values) { + return new ReverseArray(values); + } } /** @@ -1013,9 +1394,6 @@ private Reduce(Object input, Object initialValue, List re this.reduceExpressions = reduceExpressions; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -1038,15 +1416,16 @@ public Document toDocument(AggregationOperationContext context) { return new Document("$reduce", document); } + @SuppressWarnings("NullAway") private Object getMappedValue(Object value, AggregationOperationContext context) { if (value instanceof Document) { return value; } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); - } else if (value instanceof Field) { - return context.getReference(((Field) value)).toString(); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else if (value instanceof Field field) { + return context.getReference(field).toString(); } else { return context.getMappedObject(new Document("###val###", value)).get("###val###"); } @@ -1056,7 +1435,7 @@ private Object getMappedValue(Object value, AggregationOperationContext context) * Start creating new {@link Reduce}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link InitialValueBuilder} to create {@link Reduce}. */ public static InitialValueBuilder arrayOf(final String fieldReference) { @@ -1095,7 +1474,7 @@ public Reduce reduce(PropertyExpression... expressions) { * Start creating new {@link Reduce}. * * @param arrayValueExpression must not be {@literal null}. - * @return + * @return new instance of {@link InitialValueBuilder} to create {@link Reduce}. 
*/ public static InitialValueBuilder arrayOf(final AggregationExpression arrayValueExpression) { @@ -1135,7 +1514,7 @@ public interface InitialValueBuilder { * Define the initial cumulative value set before in is applied to the first element of the input array. * * @param initialValue must not be {@literal null}. - * @return + * @return never {@literal null}. */ ReduceBuilder withInitialValue(Object initialValue); } @@ -1152,7 +1531,7 @@ public interface ReduceBuilder { * {@link Variable#VALUE} are available. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Reduce}. */ Reduce reduce(AggregationExpression expression); @@ -1163,7 +1542,7 @@ public interface ReduceBuilder { * {@link Variable#VALUE} are available. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link Reduce}. */ Reduce reduce(PropertyExpression... expressions); } @@ -1178,8 +1557,8 @@ public static class PropertyExpression implements AggregationExpression { protected PropertyExpression(String propertyName, AggregationExpression aggregationExpression) { - Assert.notNull(propertyName, "Property name must not be null!"); - Assert.notNull(aggregationExpression, "AggregationExpression must not be null!"); + Assert.notNull(propertyName, "Property name must not be null"); + Assert.notNull(aggregationExpression, "AggregationExpression must not be null"); this.propertyName = propertyName; this.aggregationExpression = aggregationExpression; @@ -1189,7 +1568,7 @@ protected PropertyExpression(String propertyName, AggregationExpression aggregat * Define a result property for an {@link AggregationExpression} used in {@link Reduce}. * * @param name must not be {@literal null}. - * @return + * @return new instance of {@link AsBuilder} to create {@link Reduce}. 
*/ public static AsBuilder property(final String name) { @@ -1202,9 +1581,6 @@ public PropertyExpression definedAs(AggregationExpression expression) { }; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return new Document(propertyName, aggregationExpression.toDocument(context)); @@ -1219,30 +1595,21 @@ public interface AsBuilder { * Set the {@link AggregationExpression} resulting in the properties value. * * @param expression must not be {@literal null}. - * @return + * @return never {@literal null}. */ PropertyExpression definedAs(AggregationExpression expression); } } - public enum Variable implements Field { + public enum Variable implements AggregationVariable { THIS { - @Override - public String getName() { - return "$$this"; - } @Override public String getTarget() { return "$$this"; } - @Override - public boolean isAliased() { - return false; - } - @Override public String toString() { return getName(); @@ -1250,33 +1617,29 @@ public String toString() { }, VALUE { - @Override - public String getName() { - return "$$value"; - } @Override public String getTarget() { return "$$value"; } - @Override - public boolean isAliased() { - return false; - } - @Override public String toString() { return getName(); } }; + @Override + public boolean isInternal() { + return true; + } + /** * Create a {@link Field} reference to a given {@literal property} prefixed with the {@link Variable} identifier. * eg. {@code $$value.product} * * @param property must not be {@literal null}. - * @return + * @return never {@literal null}. 
*/ public Field referringTo(final String property) { @@ -1302,6 +1665,16 @@ public String toString() { } }; } + + public static boolean isVariable(Field field) { + + for (Variable var : values()) { + if (field.getTarget().startsWith(var.getTarget())) { + return true; + } + } + return false; + } } } @@ -1325,11 +1698,11 @@ protected String getMongoMethod() { * Start creating new {@link Zip}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ZipBuilder} to create {@link Zip}. */ public static ZipBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new ZipBuilder(Fields.field(fieldReference)); } @@ -1337,19 +1710,33 @@ public static ZipBuilder arrayOf(String fieldReference) { * Start creating new {@link Zip}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ZipBuilder} to create {@link Zip}. */ public static ZipBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new ZipBuilder(expression); } + /** + * Start creating new {@link Zip}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link Zip}. + * @since 2.2 + */ + public static ZipBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + return new ZipBuilder(values); + } + /** * Create new {@link Zip} and set the {@code useLongestLength} property to {@literal true}. * - * @return + * @return new instance of {@link Zip}. */ + @Contract("-> new") public Zip useLongestLength() { return new Zip(append("useLongestLength", true)); } @@ -1358,11 +1745,12 @@ public Zip useLongestLength() { * Optionally provide a default value. * * @param fieldReference must not be {@literal null}.
- * @return + * @return new instance of {@link Zip}. */ + @Contract("_ -> new") public Zip defaultTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Zip(append("defaults", Fields.field(fieldReference))); } @@ -1370,11 +1758,12 @@ public Zip defaultTo(String fieldReference) { * Optionally provide a default value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ + @Contract("_ -> new") public Zip defaultTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Zip(append("defaults", expression)); } @@ -1382,11 +1771,12 @@ public Zip defaultTo(AggregationExpression expression) { * Optionally provide a default value. * * @param array must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ + @Contract("_ -> new") public Zip defaultTo(Object[] array) { - Assert.notNull(array, "Array must not be null!"); + Assert.notNull(array, "Array must not be null"); return new Zip(append("defaults", Arrays.asList(array))); } @@ -1396,7 +1786,7 @@ public static class ZipBuilder { private ZipBuilder(Object sourceArray) { - this.sourceArrays = new ArrayList(); + this.sourceArrays = new ArrayList<>(); this.sourceArrays.add(sourceArray); } @@ -1406,21 +1796,21 @@ private ZipBuilder(Object sourceArray) { * array, etc. * * @param arrays arrays to zip the referenced one with. must not be {@literal null}. - * @return + * @return new instance of {@link Zip}. */ public Zip zip(Object... 
arrays) { - Assert.notNull(arrays, "Arrays must not be null!"); + Assert.notNull(arrays, "Arrays must not be null"); for (Object value : arrays) { - if (value instanceof String) { - sourceArrays.add(Fields.field((String) value)); + if (value instanceof String stringValue) { + sourceArrays.add(Fields.field(stringValue)); } else { sourceArrays.add(value); } } - return new Zip(Collections. singletonMap("inputs", sourceArrays)); + return new Zip(Collections.singletonMap("inputs", sourceArrays)); } } } @@ -1429,6 +1819,10 @@ public Zip zip(Object... arrays) { * {@link AggregationExpression} for {@code $in}. * * @author Christoph Strobl + * @author Shashank Sharma + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/in/ + * @since 2.2 */ public static class In extends AbstractAggregationExpression { @@ -1445,20 +1839,16 @@ protected String getMongoMethod() { * Start creating {@link In}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link InBuilder} to create {@link In}. */ - public static InBuilder arrayOf(final String fieldReference) { + public static InBuilder arrayOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - - return new InBuilder() { + Assert.notNull(fieldReference, "FieldReference must not be null"); - @Override - public In containsValue(Object value) { + return value -> { - Assert.notNull(value, "Value must not be null!"); - return new In(Arrays.asList(value, Fields.field(fieldReference))); - } + Assert.notNull(value, "Value must not be null"); + return new In(Arrays.asList(value, Fields.field(fieldReference))); }; } @@ -1466,20 +1856,36 @@ public In containsValue(Object value) { * Start creating {@link In}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link InBuilder} to create {@link In}. 
*/ - public static InBuilder arrayOf(final AggregationExpression expression) { + public static InBuilder arrayOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); - return new InBuilder() { + return value -> { - @Override - public In containsValue(Object value) { + Assert.notNull(value, "Value must not be null"); - Assert.notNull(value, "Value must not be null!"); - return new In(Arrays.asList(value, expression)); - } + return new In(Arrays.asList(value, expression)); + }; + } + + /** + * Support for Aggregation In Search an Element in List of Objects to Filter Start creating {@link In}. + * + * @param values must not be {@literal null}. + * @return new instance of {@link InBuilder}. + * @since 2.2 + */ + public static InBuilder arrayOf(Collection values) { + + Assert.notNull(values, "Values must not be null"); + + return value -> { + + Assert.notNull(value, "Value must not be null"); + + return new In(Arrays.asList(value, values)); }; } @@ -1492,9 +1898,252 @@ public interface InBuilder { * Set the {@literal value} to check for existence in the array. * * @param value must not be {@literal value}. - * @return + * @return new instance of {@link In}. */ In containsValue(Object value); } } + + /** + * {@link AggregationExpression} for {@code $arrayToObject} that transforms an array into a single document.
          + * NOTE: Requires MongoDB 3.6 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/arrayToObject/ + * @since 2.1 + */ + public static class ArrayToObject extends AbstractAggregationExpression { + + private ArrayToObject(Object value) { + super(value); + } + + /** + * Converts the given array (e.g. an array of two-element arrays, a field reference to an array,...) to an object. + * + * @param array must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayToObject(Object array) { + return new ArrayToObject(array); + } + + /** + * Converts the array pointed to by the given {@link Field field reference} to an object. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayValueOfToObject(String fieldReference) { + return new ArrayToObject(Fields.field(fieldReference)); + } + + /** + * Converts the result array of the given {@link AggregationExpression expression} to an object. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link ArrayToObject}. + */ + public static ArrayToObject arrayValueOfToObject(AggregationExpression expression) { + return new ArrayToObject(expression); + } + + @Override + protected String getMongoMethod() { + return "$arrayToObject"; + } + } + + /** + * {@link AggregationExpression} for {@code $first} that returns the first element in an array.
          + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class First extends AbstractAggregationExpression { + + private First(Object value) { + super(value); + } + + /** + * Returns the first element in the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First first(Object array) { + return new First(array); + } + + /** + * Returns the first element in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(String fieldReference) { + return new First(Fields.field(fieldReference)); + } + + /** + * Returns the first element of the array computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link First}. + */ + public static First firstOf(AggregationExpression expression) { + return new First(expression); + } + + @Override + protected String getMongoMethod() { + return "$first"; + } + } + + /** + * {@link AggregationExpression} for {@code $last} that returns the last element in an array.
          + * NOTE: Requires MongoDB 4.4 or later. + * + * @author Divya Srivastava + * @author Christoph Strobl + * @since 3.4 + */ + public static class Last extends AbstractAggregationExpression { + + private Last(Object value) { + super(value); + } + + /** + * Returns the last element in the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last last(Object array) { + return new Last(array); + } + + /** + * Returns the last element in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last lastOf(String fieldReference) { + return new Last(Fields.field(fieldReference)); + } + + /** + * Returns the last element of the array computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Last}. + */ + public static Last lastOf(AggregationExpression expression) { + return new Last(expression); + } + + @Override + protected String getMongoMethod() { + return "$last"; + } + } + + /** + * {@link AggregationExpression} for {@code $sortArray} that sorts elements in an array.
          + * + * @author Christoph Strobl + * @since 4.0 + */ + public static class SortArray extends AbstractAggregationExpression { + + private SortArray(Object value) { + super(value); + } + + /** + * Returns the given array. + * + * @param array must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArray(Object array) { + return new SortArray(Collections.singletonMap("input", array)); + } + + /** + * Sorts the elements in the array pointed to by the given {@link Field field reference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArrayOf(String fieldReference) { + return sortArray(Fields.field(fieldReference)); + } + + /** + * Sorts the elements of the array computed by the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + public static SortArray sortArrayOf(AggregationExpression expression) { + return sortArray(expression); + } + + /** + * Set the order to put elements in. + * + * @param sort must not be {@literal null}. + * @return new instance of {@link SortArray}. + */ + @Contract("_ -> new") + public SortArray by(Sort sort) { + return new SortArray(append("sortBy", sort)); + } + + /** + * Order the values for the array in the given direction. + * + * @param direction must not be {@literal null}. + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray direction(Direction direction) { + return new SortArray(append("sortBy", direction.isAscending() ? 1 : -1)); + } + + /** + * Sort the array elements by their values in ascending order. Suitable for arrays of simple types (e.g., integers, + * strings). + * + * @return new instance of {@link SortArray}. 
+ * @since 4.5 + */ + public SortArray byValueAscending() { + return direction(Direction.ASC); + } + + /** + * Sort the array elements by their values in descending order. Suitable for arrays of simple types (e.g., integers, + * strings). + * + * @return new instance of {@link SortArray}. + * @since 4.5 + */ + public SortArray byValueDescending() { + return direction(Direction.DESC); + } + + @Override + protected String getMongoMethod() { + return "$sortArray"; + } + } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java new file mode 100644 index 0000000000..4d321c4715 --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BasicAggregationOperation.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import org.bson.Document; +import org.bson.conversions.Bson; + +import org.springframework.data.mongodb.util.BsonUtils; +import org.springframework.util.ObjectUtils; + +/** + * {@link AggregationOperation} implementation that can return a {@link Document} from a {@link Bson} or {@link String} + * document. 
+ * + * @author Christoph Strobl + * @since 4.0 + */ +record BasicAggregationOperation(Object value) implements AggregationOperation { + + @Override + public Document toDocument(AggregationOperationContext context) { + + if (value instanceof Bson bson) { + return BsonUtils.asDocument(bson, context.getCodecRegistry()); + } + + if (value instanceof String json && BsonUtils.isJsonDocument(json)) { + return BsonUtils.parse(json, context); + } + + throw new IllegalStateException( + String.format("%s cannot be converted to org.bson.Document", ObjectUtils.nullSafeClassName(value))); + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java index df93fd8919..f3ffdb7ad1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BooleanOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -19,6 +19,8 @@ import java.util.Collections; import java.util.List; +import org.jspecify.annotations.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; /** @@ -34,7 +36,7 @@ public class BooleanOperators { * Take the array referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link BooleanOperatorFactory}. */ public static BooleanOperatorFactory valueOf(String fieldReference) { return new BooleanOperatorFactory(fieldReference); @@ -44,7 +46,7 @@ public static BooleanOperatorFactory valueOf(String fieldReference) { * Take the value resulting of the given {@link AggregationExpression}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link BooleanOperatorFactory}. */ public static BooleanOperatorFactory valueOf(AggregationExpression fieldReference) { return new BooleanOperatorFactory(fieldReference); @@ -55,7 +57,7 @@ public static BooleanOperatorFactory valueOf(AggregationExpression fieldReferenc * opposite boolean value. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Not}. */ public static Not not(String fieldReference) { return Not.not(fieldReference); @@ -66,7 +68,7 @@ public static Not not(String fieldReference) { * and returns the opposite boolean value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Not}. */ public static Not not(AggregationExpression expression) { return Not.not(expression); @@ -77,8 +79,8 @@ public static Not not(AggregationExpression expression) { */ public static class BooleanOperatorFactory { - private final String fieldReference; - private final AggregationExpression expression; + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; /** * Creates new {@link BooleanOperatorFactory} for given {@literal fieldReference}. 
@@ -87,7 +89,7 @@ public static class BooleanOperatorFactory { */ public BooleanOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -99,7 +101,7 @@ public BooleanOperatorFactory(String fieldReference) { */ public BooleanOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -109,11 +111,11 @@ public BooleanOperatorFactory(AggregationExpression expression) { * all of the expressions are {@literal true}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ public And and(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createAnd().andExpression(expression); } @@ -122,14 +124,15 @@ public And and(AggregationExpression expression) { * all of the expressions are {@literal true}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ public And and(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createAnd().andField(fieldReference); } + @SuppressWarnings("NullAway") private And createAnd() { return usesFieldRef() ? And.and(Fields.field(fieldReference)) : And.and(expression); } @@ -139,11 +142,11 @@ private And createAnd() { * any of the expressions are {@literal true}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Or}. 
*/ public Or or(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createOr().orExpression(expression); } @@ -152,14 +155,15 @@ public Or or(AggregationExpression expression) { * any of the expressions are {@literal true}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public Or or(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return createOr().orField(fieldReference); } + @SuppressWarnings("NullAway") private Or createOr() { return usesFieldRef() ? Or.or(Fields.field(fieldReference)) : Or.or(expression); } @@ -167,8 +171,9 @@ private Or createOr() { /** * Creates new {@link AggregationExpression} that evaluates a boolean and returns the opposite boolean value. * - * @return + * @return new instance of {@link Not}. */ + @SuppressWarnings("NullAway") public Not not() { return usesFieldRef() ? Not.not(fieldReference) : Not.not(expression); } @@ -198,8 +203,8 @@ protected String getMongoMethod() { * Creates new {@link And} that evaluates one or more expressions and returns {@literal true} if all of the * expressions are {@literal true}. * - * @param expressions - * @return + * @param expressions must not be {@literal null}. + * @return new instance of {@link And}. */ public static And and(Object... expressions) { return new And(Arrays.asList(expressions)); @@ -209,11 +214,12 @@ public static And and(Object... expressions) { * Creates new {@link And} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link And}. 
*/ + @Contract("_ -> new") public And andExpression(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new And(append(expression)); } @@ -221,11 +227,12 @@ public And andExpression(AggregationExpression expression) { * Creates new {@link And} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ + @Contract("_ -> new") public And andField(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new And(append(Fields.field(fieldReference))); } @@ -233,11 +240,12 @@ public And andField(String fieldReference) { * Creates new {@link And} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link And}. */ + @Contract("_ -> new") public And andValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new And(append(value)); } } @@ -263,11 +271,11 @@ protected String getMongoMethod() { * expressions are {@literal true}. * * @param expressions must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ public static Or or(Object... expressions) { - Assert.notNull(expressions, "Expressions must not be null!"); + Assert.notNull(expressions, "Expressions must not be null"); return new Or(Arrays.asList(expressions)); } @@ -275,11 +283,12 @@ public static Or or(Object... expressions) { * Creates new {@link Or} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Or}. 
*/ + @Contract("_ -> new") public Or orExpression(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Or(append(expression)); } @@ -287,11 +296,12 @@ public Or orExpression(AggregationExpression expression) { * Creates new {@link Or} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ + @Contract("_ -> new") public Or orField(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Or(append(Fields.field(fieldReference))); } @@ -299,11 +309,12 @@ public Or orField(String fieldReference) { * Creates new {@link Or} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Or}. */ + @Contract("_ -> new") public Or orValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Or(append(value)); } } @@ -329,11 +340,11 @@ protected String getMongoMethod() { * value. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Not}. */ public static Not not(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Not(asFields(fieldReference)); } @@ -342,11 +353,11 @@ public static Not not(String fieldReference) { * returns the opposite boolean value. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Not}. 
*/ public static Not not(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Not(Collections.singletonList(expression)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java index 15b4cff218..16eca4ec22 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketAutoOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,12 +15,12 @@ */ package org.springframework.data.mongodb.core.aggregation; +import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.BucketAutoOperation.BucketAutoOperationOutputBuilder; import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder; import org.springframework.util.Assert; -import org.bson.Document; - /** * Encapsulates the aggregation framework {@code $bucketAuto}-operation.
          * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into a @@ -29,8 +29,7 @@ * We recommend to use the static factory method {@link Aggregation#bucketAuto(String, int)} instead of creating * instances of this class directly. * - * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ + * @see https://docs.mongodb.org/manual/reference/aggregation/bucketAuto/ * @see BucketOperationSupport * @author Mark Paluch * @author Christoph Strobl @@ -40,7 +39,7 @@ public class BucketAutoOperation extends BucketOperationSupport 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); this.buckets = buckets; this.granularity = null; @@ -68,7 +67,7 @@ public BucketAutoOperation(AggregationExpression groupByExpression, int buckets) super(groupByExpression); - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); this.buckets = buckets; this.granularity = null; @@ -82,7 +81,7 @@ private BucketAutoOperation(BucketAutoOperation bucketOperation, Outputs outputs this.granularity = bucketOperation.granularity; } - private BucketAutoOperation(BucketAutoOperation bucketOperation, int buckets, String granularity) { + private BucketAutoOperation(BucketAutoOperation bucketOperation, int buckets, @Nullable String granularity) { super(bucketOperation); @@ -90,9 +89,6 @@ private BucketAutoOperation(BucketAutoOperation bucketOperation, int buckets, St this.granularity = granularity; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -106,18 +102,23 @@ public Document toDocument(AggregationOperationContext context) { 
options.putAll(super.toDocument(context)); - return new Document("$bucketAuto", options); + return new Document(getOperator(), options); + } + + @Override + public String getOperator() { + return "$bucketAuto"; } /** * Configures a number of bucket {@literal buckets} and return a new {@link BucketAutoOperation}. * * @param buckets must be a positive number. - * @return + * @return new instance of {@link BucketAutoOperation}. */ public BucketAutoOperation withBuckets(int buckets) { - Assert.isTrue(buckets > 0, "Number of buckets must be greater 0!"); + Assert.isTrue(buckets > 0, "Number of buckets must be greater 0"); return new BucketAutoOperation(this, buckets, granularity); } @@ -128,42 +129,30 @@ public BucketAutoOperation withBuckets(int buckets) { * Use either predefined {@link Granularities} or provide a own one. * * @param granularity must not be {@literal null}. - * @return + * @return new instance of {@link BucketAutoOperation}. */ public BucketAutoOperation withGranularity(Granularity granularity) { - Assert.notNull(granularity, "Granularity must not be null!"); + Assert.notNull(granularity, "Granularity must not be null"); return new BucketAutoOperation(this, buckets, granularity.getMongoRepresentation()); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) - */ @Override protected BucketAutoOperation newBucketOperation(Outputs outputs) { return new BucketAutoOperation(this, outputs); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) - */ @Override public ExpressionBucketAutoOperationBuilder andOutputExpression(String expression, Object... 
params) { return new ExpressionBucketAutoOperationBuilder(expression, this, params); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public BucketAutoOperationOutputBuilder andOutput(AggregationExpression expression) { return new BucketAutoOperationOutputBuilder(expression, this); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) - */ @Override public BucketAutoOperationOutputBuilder andOutput(String fieldName) { return new BucketAutoOperationOutputBuilder(Fields.field(fieldName), this); @@ -185,9 +174,6 @@ protected BucketAutoOperationOutputBuilder(Object value, BucketAutoOperation ope super(value, operation); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); @@ -209,16 +195,13 @@ public static class ExpressionBucketAutoOperationBuilder * * @param expression must not be {@literal null}. * @param operation must not be {@literal null}. - * @param parameters + * @param parameters must not be {@literal null}. 
*/ protected ExpressionBucketAutoOperationBuilder(String expression, BucketAutoOperation operation, Object[] parameters) { super(expression, operation, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketAutoOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketAutoOperationOutputBuilder(operationOutput, this.operation); @@ -240,8 +223,7 @@ public interface Granularity { /** * Supported MongoDB granularities. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/bucketAuto/#granularity * @author Mark Paluch */ public enum Granularities implements Granularity { @@ -264,9 +246,6 @@ public enum Granularities implements Granularity { this.granularity = granularity; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.GranularitytoMongoGranularity() - */ @Override public String getMongoRepresentation() { return granularity; diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java index 7ee57da27c..6ed686c086 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -20,17 +20,16 @@ import java.util.Collections; import java.util.List; +import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.BucketOperation.BucketOperationOutputBuilder; +import org.springframework.lang.Contract; import org.springframework.util.Assert; -import org.bson.Document; - /** * Encapsulates the aggregation framework {@code $bucket}-operation.
          - * * Bucket stage is typically used with {@link Aggregation} and {@code $facet}. Categorizes incoming documents into * groups, called buckets, based on a specified expression and bucket boundaries.
          - * * We recommend to use the static factory method {@link Aggregation#bucket(String)} instead of creating instances of * this class directly. * @@ -43,7 +42,7 @@ public class BucketOperation extends BucketOperationSupport boundaries; - private final Object defaultBucket; + private final @Nullable Object defaultBucket; /** * Creates a new {@link BucketOperation} given a {@link Field group-by field}. @@ -79,17 +78,14 @@ private BucketOperation(BucketOperation bucketOperation, Outputs outputs) { this.defaultBucket = bucketOperation.defaultBucket; } - private BucketOperation(BucketOperation bucketOperation, List boundaries, Object defaultBucket) { + private BucketOperation(BucketOperation bucketOperation, List boundaries, @Nullable Object defaultBucket) { super(bucketOperation); - this.boundaries = new ArrayList(boundaries); + this.boundaries = new ArrayList<>(boundaries); this.defaultBucket = defaultBucket; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -103,18 +99,24 @@ public Document toDocument(AggregationOperationContext context) { options.putAll(super.toDocument(context)); - return new Document("$bucket", options); + return new Document(getOperator(), options); + } + + @Override + public String getOperator() { + return "$bucket"; } /** * Configures a default bucket {@literal literal} and return a new {@link BucketOperation}. * * @param literal must not be {@literal null}. - * @return + * @return new instance of {@link BucketOperation}. 
*/ + @Contract("_ -> new") public BucketOperation withDefaultBucket(Object literal) { - Assert.notNull(literal, "Default bucket literal must not be null!"); + Assert.notNull(literal, "Default bucket literal must not be null"); return new BucketOperation(this, boundaries, literal); } @@ -123,47 +125,36 @@ public BucketOperation withDefaultBucket(Object literal) { * preserved and the new {@literal boundaries} are appended. * * @param boundaries must not be {@literal null}. - * @return + * @return new instance of {@link BucketOperation}. */ + @Contract("_ -> new") public BucketOperation withBoundaries(Object... boundaries) { - Assert.notNull(boundaries, "Boundaries must not be null!"); - Assert.noNullElements(boundaries, "Boundaries must not contain null values!"); + Assert.notNull(boundaries, "Boundaries must not be null"); + Assert.noNullElements(boundaries, "Boundaries must not contain null values"); - List newBoundaries = new ArrayList(this.boundaries.size() + boundaries.length); + List newBoundaries = new ArrayList<>(this.boundaries.size() + boundaries.length); newBoundaries.addAll(this.boundaries); newBoundaries.addAll(Arrays.asList(boundaries)); return new BucketOperation(this, newBoundaries, defaultBucket); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#newBucketOperation(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Outputs) - */ @Override protected BucketOperation newBucketOperation(Outputs outputs) { return new BucketOperation(this, outputs); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutputExpression(java.lang.String, java.lang.Object[]) - */ @Override public ExpressionBucketOperationBuilder andOutputExpression(String expression, Object... 
params) { return new ExpressionBucketOperationBuilder(expression, this, params); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override public BucketOperationOutputBuilder andOutput(AggregationExpression expression) { return new BucketOperationOutputBuilder(expression, this); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport#andOutput(java.lang.String) - */ @Override public BucketOperationOutputBuilder andOutput(String fieldName) { return new BucketOperationOutputBuilder(Fields.field(fieldName), this); @@ -185,9 +176,6 @@ protected BucketOperationOutputBuilder(Object value, BucketOperation operation) super(value, operation); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketOperationOutputBuilder(operationOutput, this.operation); @@ -204,20 +192,17 @@ public static class ExpressionBucketOperationBuilder extends ExpressionBucketOperationBuilderSupport { /** - * Creates a new {@link ExpressionBucketOperationBuilderSupport} for the given value, {@link BucketOperation} - * and parameters. + * Creates a new {@link ExpressionBucketOperationBuilderSupport} for the given value, {@link BucketOperation} and + * parameters. * * @param expression must not be {@literal null}. * @param operation must not be {@literal null}. - * @param parameters + * @param parameters must not be {@literal null}. 
*/ protected ExpressionBucketOperationBuilder(String expression, BucketOperation operation, Object[] parameters) { super(expression, operation, parameters); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder#apply(org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OperationOutput) - */ @Override protected BucketOperationOutputBuilder apply(OperationOutput operationOutput) { return new BucketOperationOutputBuilder(operationOutput, this.operation); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java index f8eec4ddbb..3d5ded05c2 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/BucketOperationSupport.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -21,14 +21,14 @@ import java.util.Collections; import java.util.List; +import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.OutputBuilder; import org.springframework.data.mongodb.core.aggregation.ExposedFields.ExposedField; import org.springframework.data.mongodb.core.aggregation.ProjectionOperation.ProjectionOperationBuilder; import org.springframework.expression.spel.ast.Projection; import org.springframework.util.Assert; -import org.bson.Document; - /** * Base class for bucket operations that support output expressions the aggregation framework.
          * Bucket stages collect documents into buckets and can contribute output fields.
          @@ -41,8 +41,8 @@ public abstract class BucketOperationSupport, B extends OutputBuilder> implements FieldsExposingAggregationOperation { - private final Field groupByField; - private final AggregationExpression groupByExpression; + private final @Nullable Field groupByField; + private final @Nullable AggregationExpression groupByExpression; private final Outputs outputs; /** @@ -52,7 +52,7 @@ public abstract class BucketOperationSupport operationSupport) */ protected BucketOperationSupport(BucketOperationSupport operationSupport, Outputs outputs) { - Assert.notNull(operationSupport, "BucketOperationSupport must not be null!"); - Assert.notNull(outputs, "Outputs must not be null!"); + Assert.notNull(operationSupport, "BucketOperationSupport must not be null"); + Assert.notNull(outputs, "Outputs must not be null"); this.groupByField = operationSupport.groupByField; this.groupByExpression = operationSupport.groupByExpression; @@ -104,7 +104,7 @@ protected BucketOperationSupport(BucketOperationSupport operationSupport, * * @param expression the SpEL expression, must not be {@literal null} or empty. * @param params must not be {@literal null} - * @return + * @return new instance of {@link ExpressionBucketOperationBuilderSupport} to create {@link BucketOperation}. */ public abstract ExpressionBucketOperationBuilderSupport andOutputExpression(String expression, Object... params); @@ -114,7 +114,7 @@ public abstract ExpressionBucketOperationBuilderSupport andOutputExpressio * resulting bucket documents. * * @param expression the SpEL expression, must not be {@literal null} or empty. - * @return + * @return never {@literal null}. */ public abstract B andOutput(AggregationExpression expression); @@ -124,14 +124,14 @@ public abstract ExpressionBucketOperationBuilderSupport andOutputExpressio * {@literal fieldName}. * * @param fieldName must not be {@literal null} or empty. - * @return + * @return never {@literal null}. 
*/ public abstract B andOutput(String fieldName); /** * Creates a new {@link BucketOperationSupport} given to add a count field to the resulting bucket documents. * - * @return + * @return never {@literal null}. */ public B andOutputCount() { return andOutput(new AggregationExpression() { @@ -142,16 +142,18 @@ public Document toDocument(AggregationOperationContext context) { }); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override + public Document toDocument(AggregationOperationContext context) { Document document = new Document(); - document.put("groupBy", groupByExpression == null ? context.getReference(groupByField).toString() - : groupByExpression.toDocument(context)); + if(groupByExpression != null) { + document.put("groupBy", groupByExpression.toDocument(context)); + } else if (groupByField != null) { + document.put("groupBy", context.getReference(groupByField).toString()); + + } if (!outputs.isEmpty()) { document.put("output", outputs.toDocument(context)); @@ -160,9 +162,6 @@ public Document toDocument(AggregationOperationContext context) { return document; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return outputs.asExposedFields(); @@ -220,8 +219,8 @@ public abstract static class OutputBuilder, T exte */ protected OutputBuilder(Object value, T operation) { - Assert.notNull(value, "Value must not be null or empty!"); - Assert.notNull(operation, "ProjectionOperation must not be null!"); + Assert.notNull(value, "Value must not be null or empty"); + Assert.notNull(operation, "ProjectionOperation must not be null"); this.value = value; this.operation = operation; @@ -231,7 +230,7 @@ protected OutputBuilder(Object value, T operation) { * Generates a builder for a {@code 
$sum}-expression.
          * Count expressions are emulated via {@code $sum: 1}. * - * @return + * @return never {@literal null}. */ public B count() { return sum(1); @@ -240,7 +239,7 @@ public B count() { /** * Generates a builder for a {@code $sum}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B sum() { return apply(Accumulators.SUM); @@ -249,8 +248,8 @@ public B sum() { /** * Generates a builder for a {@code $sum}-expression for the given {@literal value}. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return never {@literal null}. */ public B sum(Number value) { return apply(new OperationOutput(Accumulators.SUM.getMongoOperator(), Collections.singleton(value))); @@ -259,7 +258,7 @@ public B sum(Number value) { /** * Generates a builder for an {@code $last}-expression for the current value.. * - * @return + * @return never {@literal null}. */ public B last() { return apply(Accumulators.LAST); @@ -268,7 +267,7 @@ public B last() { /** * Generates a builder for a {@code $first}-expression the current value. * - * @return + * @return never {@literal null}. */ public B first() { return apply(Accumulators.FIRST); @@ -277,8 +276,7 @@ public B first() { /** * Generates a builder for an {@code $avg}-expression for the current value. * - * @param reference - * @return + * @return never {@literal null}. */ public B avg() { return apply(Accumulators.AVG); @@ -287,7 +285,7 @@ public B avg() { /** * Generates a builder for an {@code $min}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B min() { return apply(Accumulators.MIN); @@ -296,7 +294,7 @@ public B min() { /** * Generates a builder for an {@code $max}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B max() { return apply(Accumulators.MAX); @@ -305,7 +303,7 @@ public B max() { /** * Generates a builder for an {@code $push}-expression for the current value. 
* - * @return + * @return never {@literal null}. */ public B push() { return apply(Accumulators.PUSH); @@ -314,7 +312,7 @@ public B push() { /** * Generates a builder for an {@code $addToSet}-expression for the current value. * - * @return + * @return never {@literal null}. */ public B addToSet() { return apply(Accumulators.ADDTOSET); @@ -325,14 +323,14 @@ public B addToSet() { * * @param operation the operation name, must not be {@literal null} or empty. * @param values must not be {@literal null}. - * @return + * @return never {@literal null}. */ public B apply(String operation, Object... values) { - Assert.hasText(operation, "Operation must not be empty or null!"); - Assert.notNull(value, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be empty or null"); + Assert.notNull(value, "Values must not be null"); - List objects = new ArrayList(values.length + 1); + List objects = new ArrayList<>(values.length + 1); objects.add(value); objects.addAll(Arrays.asList(values)); return apply(new OperationOutput(operation, objects)); @@ -342,7 +340,7 @@ public B apply(String operation, Object... values) { * Apply an {@link OperationOutput} to this output. * * @param operationOutput must not be {@literal null}. - * @return + * @return never {@literal null}. */ protected abstract B apply(OperationOutput operationOutput); @@ -354,16 +352,16 @@ private B apply(Accumulators operation) { * Returns the finally to be applied {@link BucketOperation} with the given alias. * * @param alias will never be {@literal null} or empty. - * @return + * @return never {@literal null}. */ public T as(String alias) { - if (value instanceof OperationOutput) { - return this.operation.andOutput(((OperationOutput) this.value).withAlias(alias)); + if (value instanceof OperationOutput operationOutput) { + return this.operation.andOutput(operationOutput.withAlias(alias)); } if (value instanceof Field) { - throw new IllegalStateException("Cannot add a field as top-level output. 
Use accumulator expressions."); + throw new IllegalStateException("Cannot add a field as top-level output; Use accumulator expressions"); } return this.operation @@ -376,7 +374,7 @@ private enum Accumulators { SUM("$sum"), AVG("$avg"), FIRST("$first"), LAST("$last"), MAX("$max"), MIN("$min"), PUSH("$push"), ADDTOSET( "$addToSet"); - private String mongoOperator; + private final String mongoOperator; Accumulators(String mongoOperator) { this.mongoOperator = mongoOperator; @@ -396,7 +394,7 @@ protected static class Outputs implements AggregationExpression { protected static final Outputs EMPTY = new Outputs(); - private List outputs; + private final List outputs; /** * Creates a new, empty {@link Outputs}. @@ -445,7 +443,7 @@ protected ExposedFields asExposedFields() { */ protected Outputs and(Output output) { - Assert.notNull(output, "BucketOutput must not be null!"); + Assert.notNull(output, "BucketOutput must not be null"); return new Outputs(this.outputs, output); } @@ -456,9 +454,6 @@ protected boolean isEmpty() { return outputs.isEmpty(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -491,7 +486,7 @@ protected abstract static class Output implements AggregationExpression { */ protected Output(Field field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); this.field = new ExposedField(field, true); } @@ -527,11 +522,11 @@ public OperationOutput(String operation, Collection values) { super(Fields.field(operation)); - Assert.hasText(operation, "Operation must not be null or empty!"); - Assert.notNull(values, "Values must not be null!"); + Assert.hasText(operation, "Operation must not be null or empty"); + Assert.notNull(values, "Values must not be null"); this.operation = operation; - 
this.values = new ArrayList(values); + this.values = new ArrayList<>(values); } private OperationOutput(Field field, OperationOutput operationOutput) { @@ -542,32 +537,27 @@ private OperationOutput(Field field, OperationOutput operationOutput) { this.values = operationOutput.values; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ProjectionOperation.Projection#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { List operationArguments = getOperationArguments(context); - return new Document(operation, - operationArguments.size() == 1 ? operationArguments.get(0) : operationArguments); + return new Document(operation, operationArguments.size() == 1 ? operationArguments.get(0) : operationArguments); } protected List getOperationArguments(AggregationOperationContext context) { - List result = new ArrayList(values != null ? values.size() : 1); + List result = new ArrayList<>(values != null ? values.size() : 1); for (Object element : values) { - if (element instanceof Field) { - result.add(context.getReference((Field) element).toString()); - } else if (element instanceof Fields) { - for (Field field : (Fields) element) { + if (element instanceof Field field) { + result.add(context.getReference(field).toString()); + } else if (element instanceof Fields fields) { + for (Field field : fields) { result.add(context.getReference(field).toString()); } - } else if (element instanceof AggregationExpression) { - result.add(((AggregationExpression) element).toDocument(context)); + } else if (element instanceof AggregationExpression aggregationExpression) { + result.add(aggregationExpression.toDocument(context)); } else { result.add(element); } @@ -579,7 +569,7 @@ protected List getOperationArguments(AggregationOperationContext context /** * Returns the field that holds the {@link ProjectionOperationBuilder.OperationProjection}. 
* - * @return + * @return never {@literal null}. */ protected Field getField() { return getExposedField(); @@ -589,7 +579,7 @@ protected Field getField() { * Creates a new instance of this {@link OperationOutput} with the given alias. * * @param alias the alias to set - * @return + * @return new instance of {@link OperationOutput}. */ public OperationOutput withAlias(String alias) { @@ -632,19 +622,18 @@ public SpelExpressionOutput(String expression, Object[] parameters) { super(Fields.field(expression)); - Assert.hasText(expression, "Expression must not be null!"); - Assert.notNull(parameters, "Parameters must not be null!"); + Assert.hasText(expression, "Expression must not be null"); + Assert.notNull(parameters, "Parameters must not be null"); this.expression = expression; this.params = parameters.clone(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return (Document) TRANSFORMER.transform(expression, context, params); + + Object o = TRANSFORMER.transform(expression, context, params); + return o instanceof Document document ? document : new Document(); } } @@ -658,8 +647,8 @@ private static class AggregationExpressionOutput extends Output { /** * Creates a new {@link AggregationExpressionOutput}. * - * @param field - * @param expression + * @param field must not be {@literal null}. + * @param expression must not be {@literal null}. 
*/ protected AggregationExpressionOutput(Field field, AggregationExpression expression) { @@ -668,9 +657,6 @@ protected AggregationExpressionOutput(Field field, AggregationExpression express this.expression = expression; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.BucketOperationSupport.Output#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { return expression.toDocument(context); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java index 112afddbd0..e2626c3a16 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ComparisonOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -18,6 +18,8 @@ import java.util.Collections; import java.util.List; +import org.jspecify.annotations.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; /** @@ -32,7 +34,7 @@ public class ComparisonOperators { * Take the field referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. 
- * @return + * @return new instance of {@link ComparisonOperatorFactory}. */ public static ComparisonOperatorFactory valueOf(String fieldReference) { return new ComparisonOperatorFactory(fieldReference); @@ -42,7 +44,7 @@ public static ComparisonOperatorFactory valueOf(String fieldReference) { * Take the value resulting from the given {@link AggregationExpression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ComparisonOperatorFactory}. */ public static ComparisonOperatorFactory valueOf(AggregationExpression expression) { return new ComparisonOperatorFactory(expression); @@ -50,8 +52,8 @@ public static ComparisonOperatorFactory valueOf(AggregationExpression expression public static class ComparisonOperatorFactory { - private final String fieldReference; - private final AggregationExpression expression; + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; /** * Creates new {@link ComparisonOperatorFactory} for given {@literal fieldReference}. @@ -60,7 +62,7 @@ public static class ComparisonOperatorFactory { */ public ComparisonOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; } @@ -72,7 +74,7 @@ public ComparisonOperatorFactory(String fieldReference) { */ public ComparisonOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; } @@ -81,7 +83,7 @@ public ComparisonOperatorFactory(AggregationExpression expression) { * Creates new {@link AggregationExpression} that compares two values. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. 
*/ public Cmp compareTo(String fieldReference) { return createCmp().compareTo(fieldReference); @@ -91,7 +93,7 @@ public Cmp compareTo(String fieldReference) { * Creates new {@link AggregationExpression} that compares two values. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public Cmp compareTo(AggregationExpression expression) { return createCmp().compareTo(expression); @@ -101,12 +103,13 @@ public Cmp compareTo(AggregationExpression expression) { * Creates new {@link AggregationExpression} that compares two values. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public Cmp compareToValue(Object value) { return createCmp().compareToValue(value); } + @SuppressWarnings("NullAway") private Cmp createCmp() { return usesFieldRef() ? Cmp.valueOf(fieldReference) : Cmp.valueOf(expression); } @@ -116,7 +119,7 @@ private Cmp createCmp() { * value is equal to the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalTo(String fieldReference) { return createEq().equalTo(fieldReference); @@ -127,7 +130,7 @@ public Eq equalTo(String fieldReference) { * value is equal to the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalTo(AggregationExpression expression) { return createEq().equalTo(expression); @@ -138,12 +141,13 @@ public Eq equalTo(AggregationExpression expression) { * value is equal to the given value. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public Eq equalToValue(Object value) { return createEq().equalToValue(value); } + @SuppressWarnings("NullAway") private Eq createEq() { return usesFieldRef() ? 
Eq.valueOf(fieldReference) : Eq.valueOf(expression); } @@ -153,7 +157,7 @@ private Eq createEq() { * value is greater than the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThan(String fieldReference) { return createGt().greaterThan(fieldReference); @@ -164,7 +168,7 @@ public Gt greaterThan(String fieldReference) { * value is greater than the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThan(AggregationExpression expression) { return createGt().greaterThan(expression); @@ -175,12 +179,13 @@ public Gt greaterThan(AggregationExpression expression) { * value is greater than the given value. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public Gt greaterThanValue(Object value) { return createGt().greaterThanValue(value); } + @SuppressWarnings("NullAway") private Gt createGt() { return usesFieldRef() ? Gt.valueOf(fieldReference) : Gt.valueOf(expression); } @@ -190,7 +195,7 @@ private Gt createGt() { * value is greater than or equivalent to the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualTo(String fieldReference) { return createGte().greaterThanEqualTo(fieldReference); @@ -201,7 +206,7 @@ public Gte greaterThanEqualTo(String fieldReference) { * value is greater than or equivalent to the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualTo(AggregationExpression expression) { return createGte().greaterThanEqualTo(expression); @@ -212,12 +217,13 @@ public Gte greaterThanEqualTo(AggregationExpression expression) { * value is greater than or equivalent to the given value. 
* * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public Gte greaterThanEqualToValue(Object value) { return createGte().greaterThanEqualToValue(value); } + @SuppressWarnings("NullAway") private Gte createGte() { return usesFieldRef() ? Gte.valueOf(fieldReference) : Gte.valueOf(expression); } @@ -227,7 +233,7 @@ private Gte createGte() { * value is less than the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public Lt lessThan(String fieldReference) { return createLt().lessThan(fieldReference); @@ -238,7 +244,7 @@ public Lt lessThan(String fieldReference) { * value is less than the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public Lt lessThan(AggregationExpression expression) { return createLt().lessThan(expression); @@ -249,12 +255,13 @@ public Lt lessThan(AggregationExpression expression) { * value is less than to the given value. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public Lt lessThanValue(Object value) { return createLt().lessThanValue(value); } + @SuppressWarnings("NullAway") private Lt createLt() { return usesFieldRef() ? Lt.valueOf(fieldReference) : Lt.valueOf(expression); } @@ -264,7 +271,7 @@ private Lt createLt() { * value is less than or equivalent to the value of the referenced field. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public Lte lessThanEqualTo(String fieldReference) { return createLte().lessThanEqualTo(fieldReference); @@ -275,7 +282,7 @@ public Lte lessThanEqualTo(String fieldReference) { * value is less than or equivalent to the expression result. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. 
*/ public Lte lessThanEqualTo(AggregationExpression expression) { return createLte().lessThanEqualTo(expression); @@ -285,13 +292,14 @@ public Lte lessThanEqualTo(AggregationExpression expression) { * Creates new {@link AggregationExpression} that compares two values and returns {@literal true} when the first * value is less than or equivalent to the given value. * - * @param value - * @return + * @param value must not be {@literal null}. + * @return new instance of {@link Lte}. */ public Lte lessThanEqualToValue(Object value) { return createLte().lessThanEqualToValue(value); } + @SuppressWarnings("NullAway") private Lte createLte() { return usesFieldRef() ? Lte.valueOf(fieldReference) : Lte.valueOf(expression); } @@ -301,7 +309,7 @@ private Lte createLte() { * are not equivalent. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public Ne notEqualTo(String fieldReference) { return createNe().notEqualTo(fieldReference); @@ -312,7 +320,7 @@ public Ne notEqualTo(String fieldReference) { * are not equivalent. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public Ne notEqualTo(AggregationExpression expression) { return createNe().notEqualTo(expression); @@ -323,12 +331,13 @@ public Ne notEqualTo(AggregationExpression expression) { * are not equivalent. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public Ne notEqualToValue(Object value) { return createNe().notEqualToValue(value); } + @SuppressWarnings("NullAway") private Ne createNe() { return usesFieldRef() ? Ne.valueOf(fieldReference) : Ne.valueOf(expression); } @@ -358,11 +367,11 @@ protected String getMongoMethod() { * Creates new {@link Cmp}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. 
*/ public static Cmp valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cmp(asFields(fieldReference)); } @@ -370,11 +379,11 @@ public static Cmp valueOf(String fieldReference) { * Creates new {@link Cmp}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ public static Cmp valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Cmp(Collections.singletonList(expression)); } @@ -382,11 +391,12 @@ public static Cmp valueOf(AggregationExpression expression) { * Creates new {@link Cmp} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ + @Contract("_ -> new") public Cmp compareTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Cmp(append(Fields.field(fieldReference))); } @@ -394,11 +404,12 @@ public Cmp compareTo(String fieldReference) { * Creates new {@link Cmp} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Cmp}. */ + @Contract("_ -> new") public Cmp compareTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Cmp(append(expression)); } @@ -406,12 +417,13 @@ public Cmp compareTo(AggregationExpression expression) { * Creates new {@link Cmp} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. 
- * @return + * @return new instance of {@link Cmp}. */ + @Contract("_ -> new") public Cmp compareToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); - return new Cmp(append(value)); + Assert.notNull(value, "Value must not be null"); + return new Cmp(append(value, Expand.KEEP_SOURCE)); } } @@ -435,11 +447,11 @@ protected String getMongoMethod() { * Creates new {@link Eq}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public static Eq valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Eq(asFields(fieldReference)); } @@ -447,11 +459,11 @@ public static Eq valueOf(String fieldReference) { * Creates new {@link Eq}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ public static Eq valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Eq(Collections.singletonList(expression)); } @@ -459,11 +471,12 @@ public static Eq valueOf(AggregationExpression expression) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ + @Contract("_ -> new") public Eq equalTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Eq(append(Fields.field(fieldReference))); } @@ -471,11 +484,12 @@ public Eq equalTo(String fieldReference) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. 
*/ + @Contract("_ -> new") public Eq equalTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Eq(append(expression)); } @@ -483,12 +497,13 @@ public Eq equalTo(AggregationExpression expression) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Eq}. */ + @Contract("_ -> new") public Eq equalToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); - return new Eq(append(value)); + Assert.notNull(value, "Value must not be null"); + return new Eq(append(value, Expand.KEEP_SOURCE)); } } @@ -512,11 +527,11 @@ protected String getMongoMethod() { * Creates new {@link Gt}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public static Gt valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gt(asFields(fieldReference)); } @@ -524,11 +539,11 @@ public static Gt valueOf(String fieldReference) { * Creates new {@link Gt}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ public static Gt valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gt(Collections.singletonList(expression)); } @@ -536,11 +551,12 @@ public static Gt valueOf(AggregationExpression expression) { * Creates new {@link Gt} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. 
*/ + @Contract("_ -> new") public Gt greaterThan(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gt(append(Fields.field(fieldReference))); } @@ -548,11 +564,12 @@ public Gt greaterThan(String fieldReference) { * Creates new {@link Gt} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ + @Contract("_ -> new") public Gt greaterThan(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gt(append(expression)); } @@ -560,11 +577,12 @@ public Gt greaterThan(AggregationExpression expression) { * Creates new {@link Gt} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Gt}. */ + @Contract("_ -> new") public Gt greaterThanValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Gt(append(value)); } } @@ -589,11 +607,11 @@ protected String getMongoMethod() { * Creates new {@link Lt}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ public static Lt valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lt(asFields(fieldReference)); } @@ -601,11 +619,11 @@ public static Lt valueOf(String fieldReference) { * Creates new {@link Lt}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. 
*/ public static Lt valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lt(Collections.singletonList(expression)); } @@ -613,11 +631,12 @@ public static Lt valueOf(AggregationExpression expression) { * Creates new {@link Lt} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ + @Contract("_ -> new") public Lt lessThan(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lt(append(Fields.field(fieldReference))); } @@ -625,11 +644,12 @@ public Lt lessThan(String fieldReference) { * Creates new {@link Lt} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ + @Contract("_ -> new") public Lt lessThan(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lt(append(expression)); } @@ -637,11 +657,12 @@ public Lt lessThan(AggregationExpression expression) { * Creates new {@link Lt} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Lt}. */ + @Contract("_ -> new") public Lt lessThanValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Lt(append(value)); } } @@ -666,11 +687,11 @@ protected String getMongoMethod() { * Creates new {@link Gte}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. 
*/ public static Gte valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gte(asFields(fieldReference)); } @@ -678,11 +699,11 @@ public static Gte valueOf(String fieldReference) { * Creates new {@link Gte}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ public static Gte valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gte(Collections.singletonList(expression)); } @@ -690,11 +711,12 @@ public static Gte valueOf(AggregationExpression expression) { * Creates new {@link Gte} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ + @Contract("_ -> new") public Gte greaterThanEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Gte(append(Fields.field(fieldReference))); } @@ -702,11 +724,12 @@ public Gte greaterThanEqualTo(String fieldReference) { * Creates new {@link Gte} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Gte}. */ + @Contract("_ -> new") public Gte greaterThanEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Gte(append(expression)); } @@ -714,11 +737,12 @@ public Gte greaterThanEqualTo(AggregationExpression expression) { * Creates new {@link Gte} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. 
- * @return + * @return new instance of {@link Gte}. */ + @Contract("_ -> new") public Gte greaterThanEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Gte(append(value)); } } @@ -743,11 +767,11 @@ protected String getMongoMethod() { * Creates new {@link Lte}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public static Lte valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lte(asFields(fieldReference)); } @@ -755,11 +779,11 @@ public static Lte valueOf(String fieldReference) { * Creates new {@link Lte}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ public static Lte valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lte(Collections.singletonList(expression)); } @@ -767,11 +791,12 @@ public static Lte valueOf(AggregationExpression expression) { * Creates new {@link Lte} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ + @Contract("_ -> new") public Lte lessThanEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Lte(append(Fields.field(fieldReference))); } @@ -779,11 +804,12 @@ public Lte lessThanEqualTo(String fieldReference) { * Creates new {@link Lte} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. 
*/ + @Contract("_ -> new") public Lte lessThanEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Lte(append(expression)); } @@ -791,11 +817,12 @@ public Lte lessThanEqualTo(AggregationExpression expression) { * Creates new {@link Lte} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Lte}. */ + @Contract("_ -> new") public Lte lessThanEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new Lte(append(value)); } } @@ -820,11 +847,11 @@ protected String getMongoMethod() { * Creates new {@link Ne}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public static Ne valueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ne(asFields(fieldReference)); } @@ -832,11 +859,11 @@ public static Ne valueOf(String fieldReference) { * Creates new {@link Ne}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ public static Ne valueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ne(Collections.singletonList(expression)); } @@ -844,11 +871,12 @@ public static Ne valueOf(AggregationExpression expression) { * Creates new {@link Ne} with all previously added arguments appending the given one. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. 
*/ + @Contract("_ -> new") public Ne notEqualTo(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new Ne(append(Fields.field(fieldReference))); } @@ -856,11 +884,12 @@ public Ne notEqualTo(String fieldReference) { * Creates new {@link Ne} with all previously added arguments appending the given one. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ + @Contract("_ -> new") public Ne notEqualTo(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new Ne(append(expression)); } @@ -868,12 +897,13 @@ public Ne notEqualTo(AggregationExpression expression) { * Creates new {@link Eq} with all previously added arguments appending the given one. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link Ne}. */ + @Contract("_ -> new") public Ne notEqualToValue(Object value) { - Assert.notNull(value, "Value must not be null!"); - return new Ne(append(value)); + Assert.notNull(value, "Value must not be null"); + return new Ne(append(value, Expand.KEEP_SOURCE)); } } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java index 18cd232473..462d94d6f1 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConditionalOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -17,16 +17,18 @@ import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.List; import org.bson.Document; +import org.jspecify.annotations.Nullable; import org.springframework.dao.InvalidDataAccessApiUsageException; import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder; import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder; import org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Switch.CaseOperator; import org.springframework.data.mongodb.core.query.CriteriaDefinition; -import org.springframework.lang.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.ClassUtils; @@ -43,7 +45,7 @@ public class ConditionalOperators { * Take the field referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link ConditionalOperatorFactory}. */ public static ConditionalOperatorFactory when(String fieldReference) { return new ConditionalOperatorFactory(fieldReference); @@ -53,7 +55,7 @@ public static ConditionalOperatorFactory when(String fieldReference) { * Take the value resulting from the given {@literal expression}. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link ConditionalOperatorFactory}. 
*/ public static ConditionalOperatorFactory when(AggregationExpression expression) { return new ConditionalOperatorFactory(expression); @@ -63,7 +65,7 @@ public static ConditionalOperatorFactory when(AggregationExpression expression) * Take the value resulting from the given {@literal criteriaDefinition}. * * @param criteriaDefinition must not be {@literal null}. - * @return + * @return new instance of {@link ConditionalOperatorFactory}. */ public static ConditionalOperatorFactory when(CriteriaDefinition criteriaDefinition) { return new ConditionalOperatorFactory(criteriaDefinition); @@ -75,11 +77,11 @@ public static ConditionalOperatorFactory when(CriteriaDefinition criteriaDefinit * instances of undefined values or missing fields, returns the value of the replacement expression. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link IfNull.ThenBuilder} to create {@link IfNull}. */ public static IfNull.ThenBuilder ifNull(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return IfNull.ifNull(fieldReference); } @@ -89,11 +91,11 @@ public static IfNull.ThenBuilder ifNull(String fieldReference) { * instances of undefined values or missing fields, returns the value of the replacement expression. * * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link IfNull.ThenBuilder} to create {@link IfNull}. */ public static IfNull.ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return IfNull.ifNull(expression); } @@ -103,7 +105,7 @@ public static IfNull.ThenBuilder ifNull(AggregationExpression expression) { * out of the control flow. * * @param conditions must not be {@literal null}. - * @return + * @return new instance of {@link Switch}. 
*/ public static Switch switchCases(CaseOperator... conditions) { return Switch.switchCases(conditions); @@ -115,7 +117,7 @@ public static Switch switchCases(CaseOperator... conditions) { * out of the control flow. * * @param conditions must not be {@literal null}. - * @return + * @return new instance of {@link Switch}. */ public static Switch switchCases(List conditions) { return Switch.switchCases(conditions); @@ -136,7 +138,7 @@ public static class ConditionalOperatorFactory { */ public ConditionalOperatorFactory(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.fieldReference = fieldReference; this.expression = null; @@ -150,7 +152,7 @@ public ConditionalOperatorFactory(String fieldReference) { */ public ConditionalOperatorFactory(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); this.fieldReference = null; this.expression = expression; @@ -164,7 +166,7 @@ public ConditionalOperatorFactory(AggregationExpression expression) { */ public ConditionalOperatorFactory(CriteriaDefinition criteriaDefinition) { - Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null!"); + Assert.notNull(criteriaDefinition, "CriteriaDefinition must not be null"); this.fieldReference = null; this.expression = null; @@ -176,11 +178,11 @@ public ConditionalOperatorFactory(CriteriaDefinition criteriaDefinition) { * return expressions. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. */ public OtherwiseBuilder then(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return createThenBuilder().then(value); } @@ -189,11 +191,11 @@ public OtherwiseBuilder then(Object value) { * return expressions. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. */ public OtherwiseBuilder thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return createThenBuilder().then(expression); } @@ -202,14 +204,15 @@ public OtherwiseBuilder thenValueOf(AggregationExpression expression) { * return expressions. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link OtherwiseBuilder} to create {@link Cond}. */ public OtherwiseBuilder thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return createThenBuilder().then(fieldReference); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return createThenBuilder().thenValueOf(fieldReference); } + @SuppressWarnings("NullAway") private ThenBuilder createThenBuilder() { if (usesFieldRef()) { @@ -233,9 +236,9 @@ private boolean usesCriteriaDefinition() { * field references}, {@link AggregationExpression expressions}, values of simple MongoDB types or values that can be * converted to a simple MongoDB type. * + * @author Mark Paluch * @see https://docs.mongodb.com/manual/reference/operator/aggregation/ifNull/ - * @author Mark Paluch */ public static class IfNull implements AggregationExpression { @@ -251,13 +254,13 @@ private IfNull(Object condition, Object value) { /** * Creates new {@link IfNull}. * - * @param fieldReference the field to check for a {@literal null} value, field reference must not be {@literal null} - * . - * @return + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return never {@literal null}. 
*/ public static ThenBuilder ifNull(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new IfNullOperatorBuilder().ifNull(fieldReference); } @@ -266,42 +269,49 @@ public static ThenBuilder ifNull(String fieldReference) { * * @param expression the expression to check for a {@literal null} value, field reference must not be * {@literal null}. - * @return + * @return never {@literal null}. */ public static ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new IfNullOperatorBuilder().ifNull(expression); } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - List list = new ArrayList(); + List list = new ArrayList<>(); - if (condition instanceof Field) { - list.add(context.getReference((Field) condition).toString()); - } else if (condition instanceof AggregationExpression) { - list.add(((AggregationExpression) condition).toDocument(context)); + if (condition instanceof Collection collection) { + for (Object val : collection) { + list.add(mapCondition(val, context)); + } } else { - list.add(condition); + list.add(mapCondition(condition, context)); } list.add(resolve(value, context)); - return new Document("$ifNull", list); } + private Object mapCondition(Object condition, AggregationOperationContext context) { + + if (condition instanceof Field field) { + return context.getReference(field).toString(); + } else if (condition instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); + } else { + return condition; + } + } + + 
@SuppressWarnings("NullAway") private Object resolve(Object value, AggregationOperationContext context) { - if (value instanceof Field) { - return context.getReference((Field) value).toString(); - } else if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof Field field) { + return context.getReference(field).toString(); + } else if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } else if (value instanceof Document) { return value; } @@ -324,33 +334,53 @@ public interface IfNullBuilder { /** * @param expression the expression to check for a {@literal null} value, field name must not be {@literal null} * or empty. - * @return the {@link ThenBuilder} + * @return the {@link ThenBuilder}. */ ThenBuilder ifNull(AggregationExpression expression); } + /** + * @author Christoph Strobl + * @since 3.3 + */ + public interface OrBuilder { + + /** + * @param fieldReference the field to check for a {@literal null} value, field reference must not be + * {@literal null}. + * @return the {@link ThenBuilder} + */ + ThenBuilder orIfNull(String fieldReference); + + /** + * @param expression the expression to check for a {@literal null} value, + * @return the {@link ThenBuilder}. + */ + ThenBuilder orIfNull(AggregationExpression expression); + } + /** * @author Mark Paluch */ - public interface ThenBuilder { + public interface ThenBuilder extends OrBuilder { /** * @param value the value to be used if the {@code $ifNull} condition evaluates {@literal true}. Can be a * {@link Document}, a value that is supported by MongoDB or a value that can be converted to a MongoDB * representation but must not be {@literal null}. - * @return + * @return new instance of {@link IfNull}. */ IfNull then(Object value); /** * @param fieldReference the field holding the replacement value, must not be {@literal null}. 
- * @return + * @return new instance of {@link IfNull}. */ IfNull thenValueOf(String fieldReference); /** * @param expression the expression yielding to the replacement value, must not be {@literal null}. - * @return + * @return new instance of {@link IfNull}. */ IfNull thenValueOf(AggregationExpression expression); } @@ -362,9 +392,11 @@ public interface ThenBuilder { */ static final class IfNullOperatorBuilder implements IfNullBuilder, ThenBuilder { - private @Nullable Object condition; + private List conditions; - private IfNullOperatorBuilder() {} + private IfNullOperatorBuilder() { + conditions = new ArrayList<>(); + } /** * Creates a new builder for {@link IfNull}. @@ -375,50 +407,52 @@ public static IfNullOperatorBuilder newBuilder() { return new IfNullOperatorBuilder(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.IfNullBuilder#ifNull(java.lang.String) - */ + @Contract("_ -> this") public ThenBuilder ifNull(String fieldReference) { - Assert.hasText(fieldReference, "FieldReference name must not be null or empty!"); - this.condition = Fields.field(fieldReference); + Assert.hasText(fieldReference, "FieldReference name must not be null or empty"); + this.conditions.add(Fields.field(fieldReference)); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.IfNullBuilder#ifNull(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override + @Contract("_ -> this") public ThenBuilder ifNull(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression name must not be null or empty!"); - this.condition = expression; + Assert.notNull(expression, "AggregationExpression name must not be null or empty"); + this.conditions.add(expression); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#then(java.lang.Object) - 
*/ + @Override + @Contract("_ -> this") + public ThenBuilder orIfNull(String fieldReference) { + return ifNull(fieldReference); + } + + @Override + @Contract("_ -> this") + public ThenBuilder orIfNull(AggregationExpression expression) { + return ifNull(expression); + } + + @Contract("_ -> new") public IfNull then(Object value) { - return new IfNull(condition, value); + return new IfNull(conditions, value); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#thenValueOf(java.lang.String) - */ + @Contract("_ -> new") public IfNull thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); - return new IfNull(condition, Fields.field(fieldReference)); + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new IfNull(conditions, Fields.field(fieldReference)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.IfNull.ThenBuilder#thenValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ + @Contract("_ -> new") public IfNull thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); - return new IfNull(condition, expression); + Assert.notNull(expression, "Expression must not be null"); + return new IfNull(conditions, expression); } } } @@ -446,7 +480,7 @@ protected String getMongoMethod() { */ public static Switch switchCases(CaseOperator... conditions) { - Assert.notNull(conditions, "Conditions must not be null!"); + Assert.notNull(conditions, "Conditions must not be null"); return switchCases(Arrays.asList(conditions)); } @@ -457,10 +491,17 @@ public static Switch switchCases(CaseOperator... conditions) { */ public static Switch switchCases(List conditions) { - Assert.notNull(conditions, "Conditions must not be null!"); - return new Switch(Collections. 
singletonMap("branches", new ArrayList(conditions))); + Assert.notNull(conditions, "Conditions must not be null"); + return new Switch(Collections.singletonMap("branches", new ArrayList<>(conditions))); } + /** + * Set the default value. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Switch}. + */ + @Contract("_ -> new") public Switch defaultTo(Object value) { return new Switch(append("default", value)); } @@ -481,31 +522,28 @@ private CaseOperator(AggregationExpression when, Object then) { public static ThenBuilder when(final AggregationExpression condition) { - Assert.notNull(condition, "Condition must not be null!"); + Assert.notNull(condition, "Condition must not be null"); return new ThenBuilder() { @Override public CaseOperator then(Object value) { - Assert.notNull(value, "Value must not be null!"); + Assert.notNull(value, "Value must not be null"); return new CaseOperator(condition, value); } }; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { Document dbo = new Document("case", when.toDocument(context)); - if (then instanceof AggregationExpression) { - dbo.put("then", ((AggregationExpression) then).toDocument(context)); - } else if (then instanceof Field) { - dbo.put("then", context.getReference((Field) then).toString()); + if (then instanceof AggregationExpression aggregationExpression) { + dbo.put("then", aggregationExpression.toDocument(context)); + } else if (then instanceof Field field) { + dbo.put("then", context.getReference(field).toString()); } else { dbo.put("then", then); } @@ -522,7 +560,7 @@ public interface ThenBuilder { * Set the then {@literal value}. * * @param value must not be {@literal null}. - * @return + * @return new instance of {@link CaseOperator}. 
*/ CaseOperator then(Object value); } @@ -536,10 +574,10 @@ public interface ThenBuilder { * {@link AggregationExpression expressions}, values of simple MongoDB types or values that can be converted to a * simple MongoDB type. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ * @author Mark Paluch * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/cond/ */ public static class Cond implements AggregationExpression { @@ -571,9 +609,9 @@ private Cond(CriteriaDefinition condition, Object thenValue, Object otherwiseVal private Cond(Object condition, Object thenValue, Object otherwiseValue) { - Assert.notNull(condition, "Condition must not be null!"); - Assert.notNull(thenValue, "Then value must not be null!"); - Assert.notNull(otherwiseValue, "Otherwise value must not be null!"); + Assert.notNull(condition, "Condition must not be null"); + Assert.notNull(thenValue, "Then value must not be null"); + Assert.notNull(otherwiseValue, "Otherwise value must not be null"); assertNotBuilder(condition, "Condition"); assertNotBuilder(thenValue, "Then value"); @@ -584,10 +622,6 @@ private Cond(Object condition, Object thenValue, Object otherwiseValue) { this.otherwiseValue = otherwiseValue; } - /* - * (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationExpression#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { @@ -600,14 +634,15 @@ public Document toDocument(AggregationOperationContext context) { return new Document("$cond", condObject); } + @SuppressWarnings("NullAway") private Object resolveValue(AggregationOperationContext context, Object value) { if (value instanceof Document || value instanceof Field) { return resolve(context, value); } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + 
if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } return context.getMappedObject(new Document("$set", value)).get("$set"); @@ -619,27 +654,24 @@ private Object resolveCriteria(AggregationOperationContext context, Object value return resolve(context, value); } - if (value instanceof AggregationExpression) { - return ((AggregationExpression) value).toDocument(context); + if (value instanceof AggregationExpression aggregationExpression) { + return aggregationExpression.toDocument(context); } - if (value instanceof CriteriaDefinition) { - - Document mappedObject = context.getMappedObject(((CriteriaDefinition) value).getCriteriaObject()); - List clauses = new ArrayList(); - - clauses.addAll(getClauses(context, mappedObject)); + if (value instanceof CriteriaDefinition criteriaDefinition) { + Document mappedObject = context.getMappedObject(criteriaDefinition.getCriteriaObject()); + List clauses = getClauses(context, mappedObject); return clauses.size() == 1 ? clauses.get(0) : clauses; } throw new InvalidDataAccessApiUsageException( - String.format("Invalid value in condition. 
Supported: Document, Field references, Criteria, got: %s", value)); + String.format("Invalid value in condition; Supported: Document, Field references, Criteria, got: %s", value)); } private List getClauses(AggregationOperationContext context, Document mappedObject) { - List clauses = new ArrayList(); + List clauses = new ArrayList<>(); for (String key : mappedObject.keySet()) { @@ -652,22 +684,20 @@ private List getClauses(AggregationOperationContext context, Document ma private List getClauses(AggregationOperationContext context, String key, Object predicate) { - List clauses = new ArrayList(); + List clauses = new ArrayList<>(); + + if (predicate instanceof List predicates) { - if (predicate instanceof List) { + List args = new ArrayList<>(predicates.size()); - List args = new ArrayList(); - for (Object clause : (List) predicate) { - if (clause instanceof Document) { - args.addAll(getClauses(context, (Document) clause)); + for (Object clause : predicates) { + if (clause instanceof Document document) { + args.addAll(getClauses(context, document)); } } clauses.add(new Document(key, args)); - - } else if (predicate instanceof Document) { - - Document nested = (Document) predicate; + } else if (predicate instanceof Document nested) { for (String s : nested.keySet()) { @@ -675,15 +705,14 @@ private List getClauses(AggregationOperationContext context, String key, continue; } - List args = new ArrayList(); + List args = new ArrayList<>(2); args.add("$" + key); args.add(nested.get(s)); clauses.add(new Document(s, args)); } - } else if (!isKeyword(key)) { - List args = new ArrayList(); + List args = new ArrayList<>(2); args.add("$" + key); args.add(predicate); clauses.add(new Document("$eq", args)); @@ -704,8 +733,8 @@ private boolean isKeyword(String candidate) { private Object resolve(AggregationOperationContext context, Object value) { - if (value instanceof Document) { - return context.getMappedObject((Document) value); + if (value instanceof Document document) { + 
return context.getMappedObject(document); } return context.getReference((Field) value).toString(); @@ -868,111 +897,97 @@ public static ConditionalExpressionBuilder newBuilder() { return new ConditionalExpressionBuilder(); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.bson.Document) - */ @Override + @Contract("_ -> this") public ConditionalExpressionBuilder when(Document booleanExpression) { - Assert.notNull(booleanExpression, "'Boolean expression' must not be null!"); + Assert.notNull(booleanExpression, "'Boolean expression' must not be null"); this.condition = booleanExpression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.springframework.data.mongodb.core.query.CriteriaDefinition) - */ @Override + @Contract("_ -> this") public ThenBuilder when(CriteriaDefinition criteria) { - Assert.notNull(criteria, "Criteria must not be null!"); + Assert.notNull(criteria, "Criteria must not be null"); this.condition = criteria; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override + @Contract("_ -> this") public ThenBuilder when(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression field must not be null!"); + Assert.notNull(expression, "AggregationExpression field must not be null"); this.condition = expression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.WhenBuilder#when(java.lang.String) - */ @Override + @Contract("_ -> this") public ThenBuilder when(String booleanField) { - Assert.hasText(booleanField, "Boolean field name must not be null or empty!"); + Assert.hasText(booleanField, "Boolean field name must not be null or 
empty"); this.condition = Fields.field(booleanField); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#then(java.lang.Object) - */ @Override + @Contract("_ -> this") public OtherwiseBuilder then(Object thenValue) { - Assert.notNull(thenValue, "Then-value must not be null!"); + Assert.notNull(thenValue, "Then-value must not be null"); this.thenValue = thenValue; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#thenValueOf(java.lang.String) - */ @Override + @Contract("_ -> this") public OtherwiseBuilder thenValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); this.thenValue = Fields.field(fieldReference); return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.ThenBuilder#thenValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override + @Contract("_ -> this") public OtherwiseBuilder thenValueOf(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); this.thenValue = expression; return this; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwise(java.lang.Object) - */ @Override + @Contract("_ -> new") public Cond otherwise(Object otherwiseValue) { - Assert.notNull(otherwiseValue, "Value must not be null!"); + Assert.notNull(otherwiseValue, "Value must not be null"); + Assert.notNull(condition, "Condition value needs to be set first"); + Assert.notNull(thenValue, "Then value needs to be set first"); return new Cond(condition, thenValue, otherwiseValue); } - /* (non-Javadoc) - * @see 
org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwiseValueOf(java.lang.String) - */ @Override + @Contract("_ -> new") public Cond otherwiseValueOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); + Assert.notNull(condition, "Condition value needs to be set first"); + Assert.notNull(thenValue, "Then value needs to be set first"); return new Cond(condition, thenValue, Fields.field(fieldReference)); } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.ConditionalOperators.Cond.OtherwiseBuilder#otherwiseValueOf(org.springframework.data.mongodb.core.aggregation.AggregationExpression) - */ @Override + @Contract("_ -> new") public Cond otherwiseValueOf(AggregationExpression expression) { - Assert.notNull(expression, "AggregationExpression must not be null!"); + Assert.notNull(expression, "AggregationExpression must not be null"); + Assert.notNull(condition, "Condition value needs to be set first"); + Assert.notNull(thenValue, "Then value needs to be set first"); return new Cond(condition, thenValue, expression); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java new file mode 100644 index 0000000000..35a6ad061c --- /dev/null +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/ConvertOperators.java @@ -0,0 +1,756 @@ +/* + * Copyright 2018-2025 the original author or authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.springframework.data.mongodb.core.aggregation; + +import java.util.Collections; + +import org.jspecify.annotations.Nullable; +import org.springframework.data.mongodb.core.schema.JsonSchemaObject.Type; +import org.springframework.util.Assert; + +/** + * Gateway to {@literal convert} aggregation operations. + * + * @author Christoph Strobl + * @since 2.1 + */ +public class ConvertOperators { + + /** + * Take the field referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return + */ + public static ConvertOperatorFactory valueOf(String fieldReference) { + return new ConvertOperatorFactory(fieldReference); + } + + /** + * Take the value resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return + */ + public static ConvertOperatorFactory valueOf(AggregationExpression expression) { + return new ConvertOperatorFactory(expression); + } + + /** + * @author Christoph Strobl + */ + public static class ConvertOperatorFactory { + + private final @Nullable String fieldReference; + private final @Nullable AggregationExpression expression; + + /** + * Creates new {@link ConvertOperatorFactory} for given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. 
+ */ + public ConvertOperatorFactory(String fieldReference) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + + this.fieldReference = fieldReference; + this.expression = null; + } + + /** + * Creates new {@link ConvertOperatorFactory} for given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + */ + public ConvertOperatorFactory(AggregationExpression expression) { + + Assert.notNull(expression, "Expression must not be null"); + + this.fieldReference = null; + this.expression = expression; + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@code stringTypeIdentifier}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param stringTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(String stringTypeIdentifier) { + return createConvert().to(stringTypeIdentifier); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@code numericTypeIdentifier}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param numericTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(int numericTypeIdentifier) { + return createConvert().to(numericTypeIdentifier); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@link Type}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param type must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertTo(Type type) { + return createConvert().to(type); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the value of the given {@link Field field reference}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertToTypeOf(String fieldReference) { + return createConvert().toTypeOf(fieldReference); + } + + /** + * Creates new {@link Convert aggregation expression} that takes the associated value and converts it into the type + * specified by the given {@link AggregationExpression expression}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert convertToTypeOf(AggregationExpression expression) { + return createConvert().toTypeOf(expression); + } + + /** + * Creates new {@link ToBool aggregation expression} for {@code $toBool} that converts a value to boolean. Shorthand + * for {@link #convertTo(String) #convertTo("bool")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToBool}. + */ + public ToBool convertToBoolean() { + return ToBool.toBoolean(valueObject()); + } + + /** + * Creates new {@link ToDate aggregation expression} for {@code $toDate} that converts a value to a date. Shorthand + * for {@link #convertTo(String) #convertTo("date")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDate}. + */ + public ToDate convertToDate() { + return ToDate.toDate(valueObject()); + } + + /** + * Creates new {@link ToDecimal aggregation expression} for {@code $toDecimal} that converts a value to a decimal. + * Shorthand for {@link #convertTo(String) #convertTo("decimal")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDecimal}. + */ + public ToDecimal convertToDecimal() { + return ToDecimal.toDecimal(valueObject()); + } + + /** + * Creates new {@link ToDouble aggregation expression} for {@code $toDouble} that converts a value to a decimal. + * Shorthand for {@link #convertTo(String) #convertTo("double")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToDouble}. + */ + public ToDouble convertToDouble() { + return ToDouble.toDouble(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toInt} that converts a value to an int. Shorthand + * for {@link #convertTo(String) #convertTo("int")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToInt convertToInt() { + return ToInt.toInt(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toLong} that converts a value to a long. Shorthand + * for {@link #convertTo(String) #convertTo("long")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToLong convertToLong() { + return ToLong.toLong(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toObjectId} that converts a value to a objectId. Shorthand + * for {@link #convertTo(String) #convertTo("objectId")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToObjectId convertToObjectId() { + return ToObjectId.toObjectId(valueObject()); + } + + /** + * Creates new {@link ToInt aggregation expression} for {@code $toString} that converts a value to a string. Shorthand + * for {@link #convertTo(String) #convertTo("string")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @return new instance of {@link ToInt}. + */ + public ToString convertToString() { + return ToString.toString(valueObject()); + } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to + * radians. + * + * @return new instance of {@link DegreesToRadians}. + * @since 3.3 + */ + public DegreesToRadians convertDegreesToRadians() { + return DegreesToRadians.degreesToRadians(valueObject()); + } + + @SuppressWarnings("NullAway") + private Convert createConvert() { + return usesFieldRef() ? Convert.convertValueOf(fieldReference) : Convert.convertValueOf(expression); + } + + @SuppressWarnings("NullAway") + private Object valueObject() { + return usesFieldRef() ? Fields.field(fieldReference) : expression; + } + + private boolean usesFieldRef() { + return fieldReference != null; + } + } + + /** + * {@link AggregationExpression} for {@code $convert} that converts a value to a specified type.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/convert/ + * @since 2.1 + */ + public static class Convert extends AbstractAggregationExpression { + + private Convert(Object value) { + super(value); + } + + /** + * Creates new {@link Convert} using the given value for the {@literal input} attribute. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValue(Object value) { + return new Convert(Collections.singletonMap("input", value)); + } + + /** + * Creates new {@link Convert} using the value of the provided {@link Field fieldReference} as {@literal input} + * value. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValueOf(String fieldReference) { + return convertValue(Fields.field(fieldReference)); + } + + /** + * Creates new {@link Convert} using the result of the provided {@link AggregationExpression expression} as + * {@literal input} value. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public static Convert convertValueOf(AggregationExpression expression) { + return convertValue(expression); + } + + /** + * Specify the conversion target type via its {@link String} representation. + *
            + *
          • double
          • + *
          • string
          • + *
          • objectId
          • + *
          • bool
          • + *
          • date
          • + *
          • int
          • + *
          • long
          • + *
          • decimal
          • + *
          + * + * @param stringTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(String stringTypeIdentifier) { + return new Convert(append("to", stringTypeIdentifier)); + } + + /** + * Specify the conversion target type via its numeric representation. + *
          + *
          1
          + *
          double
          + *
          2
          + *
          string
          + *
          7
          + *
          objectId
          + *
          8
          + *
          bool
          + *
          9
          + *
          date
          + *
          16
          + *
          int
          + *
          18
          + *
          long
          + *
          19
          + *
          decimal
          + *
          + * + * @param numericTypeIdentifier must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(int numericTypeIdentifier) { + return new Convert(append("to", numericTypeIdentifier)); + } + + /** + * Specify the conversion target type. + * + * @param type must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert to(Type type) { + + String typeString = Type.BOOLEAN.equals(type) ? "bool" : type.value().toString(); + return to(typeString); + } + + /** + * Specify the conversion target type via the value of the given field. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert toTypeOf(String fieldReference) { + return new Convert(append("to", Fields.field(fieldReference))); + } + + /** + * Specify the conversion target type via the value of the given {@link AggregationExpression expression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert toTypeOf(AggregationExpression expression) { + return new Convert(append("to", expression)); + } + + /** + * Optionally specify the value to return on encountering an error during conversion. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onErrorReturn(Object value) { + return new Convert(append("onError", value)); + } + + /** + * Optionally specify the field holding the value to return on encountering an error during conversion. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onErrorReturnValueOf(String fieldReference) { + return onErrorReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return on encountering an error during conversion. + * + * @param expression must not be {@literal null}. 
+ * @return new instance of {@link Convert}. + */ + public Convert onErrorReturnValueOf(AggregationExpression expression) { + return onErrorReturn(expression); + } + + /** + * Optionally specify the value to return when the input is {@literal null} or missing. + * + * @param value must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturn(Object value) { + return new Convert(append("onNull", value)); + } + + /** + * Optionally specify the field holding the value to return when the input is {@literal null} or missing. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturnValueOf(String fieldReference) { + return onNullReturn(Fields.field(fieldReference)); + } + + /** + * Optionally specify the expression to evaluate and return when the input is {@literal null} or missing. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link Convert}. + */ + public Convert onNullReturnValueOf(AggregationExpression expression) { + return onNullReturn(expression); + } + + @Override + protected String getMongoMethod() { + return "$convert"; + } + } + + /** + * {@link AggregationExpression} for {@code $toBool} that converts a value to {@literal boolean}. Shorthand for + * {@link Convert#to(String) Convert#to("bool")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toBool/ + * @since 2.1 + */ + public static class ToBool extends AbstractAggregationExpression { + + private ToBool(Object value) { + super(value); + } + + /** + * Creates new {@link ToBool} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToBool}. + */ + public static ToBool toBoolean(Object value) { + return new ToBool(value); + } + + @Override + protected String getMongoMethod() { + return "$toBool"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDate} that converts a value to {@literal date}. Shorthand for + * {@link Convert#to(String) Convert#to("date")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDate/ + * @since 2.1 + */ + public static class ToDate extends AbstractAggregationExpression { + + private ToDate(Object value) { + super(value); + } + + /** + * Creates new {@link ToDate} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDate}. + */ + public static ToDate toDate(Object value) { + return new ToDate(value); + } + + @Override + protected String getMongoMethod() { + return "$toDate"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDecimal} that converts a value to {@literal decimal}. Shorthand for + * {@link Convert#to(String) Convert#to("decimal")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDecimal/ + * @since 2.1 + */ + public static class ToDecimal extends AbstractAggregationExpression { + + private ToDecimal(Object value) { + super(value); + } + + /** + * Creates new {@link ToDecimal} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDecimal}. + */ + public static ToDecimal toDecimal(Object value) { + return new ToDecimal(value); + } + + @Override + protected String getMongoMethod() { + return "$toDecimal"; + } + } + + /** + * {@link AggregationExpression} for {@code $toDouble} that converts a value to {@literal double}. Shorthand for + * {@link Convert#to(String) Convert#to("double")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toDouble/ + * @since 2.1 + */ + public static class ToDouble extends AbstractAggregationExpression { + + private ToDouble(Object value) { + super(value); + } + + /** + * Creates new {@link ToDouble} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToDouble}. + */ + public static ToDouble toDouble(Object value) { + return new ToDouble(value); + } + + @Override + protected String getMongoMethod() { + return "$toDouble"; + } + } + + /** + * {@link AggregationExpression} for {@code $toInt} that converts a value to {@literal integer}. Shorthand for + * {@link Convert#to(String) Convert#to("int")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toInt/ + * @since 2.1 + */ + public static class ToInt extends AbstractAggregationExpression { + + private ToInt(Object value) { + super(value); + } + + /** + * Creates new {@link ToInt} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToInt}. + */ + public static ToInt toInt(Object value) { + return new ToInt(value); + } + + @Override + protected String getMongoMethod() { + return "$toInt"; + } + } + + /** + * {@link AggregationExpression} for {@code $toLong} that converts a value to {@literal long}. Shorthand for + * {@link Convert#to(String) Convert#to("long")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toLong/ + * @since 2.1 + */ + public static class ToLong extends AbstractAggregationExpression { + + private ToLong(Object value) { + super(value); + } + + /** + * Creates new {@link ToLong} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToLong}. + */ + public static ToLong toLong(Object value) { + return new ToLong(value); + } + + @Override + protected String getMongoMethod() { + return "$toLong"; + } + } + + /** + * {@link AggregationExpression} for {@code $toObjectId} that converts a value to {@literal objectId}. Shorthand for + * {@link Convert#to(String) Convert#to("objectId")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toObjectId/ + * @since 2.1 + */ + public static class ToObjectId extends AbstractAggregationExpression { + + private ToObjectId(Object value) { + super(value); + } + + /** + * Creates new {@link ToObjectId} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToObjectId}. + */ + public static ToObjectId toObjectId(Object value) { + return new ToObjectId(value); + } + + @Override + protected String getMongoMethod() { + return "$toObjectId"; + } + } + + /** + * {@link AggregationExpression} for {@code $toString} that converts a value to {@literal string}. Shorthand for + * {@link Convert#to(String) Convert#to("string")}.
          + * NOTE: Requires MongoDB 4.0 or later. + * + * @author Christoph Strobl + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/toString/ + * @since 2.1 + */ + public static class ToString extends AbstractAggregationExpression { + + private ToString(Object value) { + super(value); + } + + /** + * Creates new {@link ToString} using the given value as input. + * + * @param value must not be {@literal null}. + * @return new instance of {@link ToString}. + */ + public static ToString toString(Object value) { + return new ToString(value); + } + + @Override + protected String getMongoMethod() { + return "$toString"; + } + } + + /** + * {@link AggregationExpression} for {@code $degreesToRadians} that converts an input value measured in degrees to radians. + * + * @author Christoph Strobl + * @since 3.3 + */ + public static class DegreesToRadians extends AbstractAggregationExpression { + + private DegreesToRadians(Object value) { + super(value); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the value of the given field, measured in degrees, to radians. + * + * @param fieldName must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(String fieldName) { + return degreesToRadians(Fields.field(fieldName)); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the result of the given {@link AggregationExpression expression}, measured in degrees, to radians. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadiansOf(AggregationExpression expression) { + return degreesToRadians(expression); + } + + /** + * Create a new instance of {@link DegreesToRadians} that converts the given value, measured in degrees, to radians. + * + * @param value must not be {@literal null}. 
+ * @return new instance of {@link DegreesToRadians}. + */ + public static DegreesToRadians degreesToRadians(Object value) { + return new DegreesToRadians(value); + } + + @Override + protected String getMongoMethod() { + return "$degreesToRadians"; + } + } +} diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java index 8a1c953440..6a6108f832 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/CountOperation.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018 the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -24,8 +24,7 @@ * We recommend to use the static factory method {@link Aggregation#count()} instead of creating instances of this class * directly. * - * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ + * @see https://docs.mongodb.com/manual/reference/operator/aggregation/count/ * @author Mark Paluch * @since 1.10 */ @@ -34,27 +33,26 @@ public class CountOperation implements FieldsExposingAggregationOperation { private final String fieldName; /** - * Creates a new {@link CountOperation} given the {@link fieldName} field name. + * Creates a new {@link CountOperation} given the {@literal fieldName} field name. * - * @param asFieldName must not be {@literal null} or empty. 
+ * @param fieldName must not be {@literal null} or empty. */ public CountOperation(String fieldName) { - Assert.hasText(fieldName, "Field name must not be null or empty!"); + Assert.hasText(fieldName, "Field name must not be null or empty"); this.fieldName = fieldName; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.AggregationOperation#toDocument(org.springframework.data.mongodb.core.aggregation.AggregationOperationContext) - */ @Override public Document toDocument(AggregationOperationContext context) { - return new Document("$count", fieldName); + return new Document(getOperator(), fieldName); + } + + @Override + public String getOperator() { + return "$count"; } - /* (non-Javadoc) - * @see org.springframework.data.mongodb.core.aggregation.FieldsExposingAggregationOperation#getFields() - */ @Override public ExposedFields getFields() { return ExposedFields.from(new ExposedField(fieldName, true)); @@ -71,7 +69,7 @@ public static class CountOperationBuilder { * Returns the finally to be applied {@link CountOperation} with the given alias. * * @param fieldName must not be {@literal null} or empty. - * @return + * @return new instance of {@link CountOperation}. */ public CountOperation as(String fieldName) { return new CountOperation(fieldName); diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java index 9a83753a17..26a85bf2c3 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DataTypeOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016. the original author or authors. + * Copyright 2016-2025 the original author or authors. 
* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -30,7 +30,7 @@ public class DataTypeOperators { * Return the BSON data type of the given {@literal field}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link Type}. */ public static Type typeOf(String fieldReference) { return Type.typeOf(fieldReference); @@ -56,11 +56,11 @@ protected String getMongoMethod() { * Creates new {@link Type}. * * @param field must not be {@literal null}. - * @return + * @return new instance of {@link Type}. */ public static Type typeOf(String field) { - Assert.notNull(field, "Field must not be null!"); + Assert.notNull(field, "Field must not be null"); return new Type(Fields.field(field)); } } diff --git a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java index a3fd38a0a2..7bf8a231ff 100644 --- a/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java +++ b/spring-data-mongodb/src/main/java/org/springframework/data/mongodb/core/aggregation/DateOperators.java @@ -1,11 +1,11 @@ /* - * Copyright 2016-2018. the original author or authors. + * Copyright 2016-2025 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,13 +15,22 @@ */ package org.springframework.data.mongodb.core.aggregation; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.temporal.ChronoUnit; import java.util.Collections; +import java.util.HashMap; import java.util.LinkedHashMap; +import java.util.Locale; import java.util.Map; +import java.util.TimeZone; +import java.util.concurrent.TimeUnit; -import org.springframework.lang.Nullable; +import org.jspecify.annotations.Nullable; +import org.springframework.lang.Contract; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; +import org.springframework.util.StringUtils; /** * Gateway to {@literal Date} aggregation operations. @@ -36,29 +45,54 @@ public class DateOperators { * Take the date referenced by given {@literal fieldReference}. * * @param fieldReference must not be {@literal null}. - * @return + * @return new instance of {@link DateOperatorFactory}. */ public static DateOperatorFactory dateOf(String fieldReference) { - Assert.notNull(fieldReference, "FieldReference must not be null!"); + Assert.notNull(fieldReference, "FieldReference must not be null"); return new DateOperatorFactory(fieldReference); } + /** + * Take the date referenced by given {@literal fieldReference}. + * + * @param fieldReference must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(String fieldReference, Timezone timezone) { + + Assert.notNull(fieldReference, "FieldReference must not be null"); + return new DateOperatorFactory(fieldReference).withTimezone(timezone); + } + /** * Take the date resulting from the given {@link AggregationExpression}. 
* * @param expression must not be {@literal null}. - * @return + * @return new instance of {@link DateOperatorFactory}. */ public static DateOperatorFactory dateOf(AggregationExpression expression) { - Assert.notNull(expression, "Expression must not be null!"); + Assert.notNull(expression, "Expression must not be null"); return new DateOperatorFactory(expression); } /** - * Take the given value as date. - *

          + * Take the date resulting from the given {@link AggregationExpression}. + * + * @param expression must not be {@literal null}. + * @return new instance of {@link DateOperatorFactory}. + * @since 3.3 + */ + public static DateOperatorFactory zonedDateOf(AggregationExpression expression, Timezone timezone) { + + Assert.notNull(expression, "Expression must not be null"); + return new DateOperatorFactory(expression).withTimezone(timezone); + } + + /** + * Take the given value as date.
          * This can be one of: *